Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] #14803 State changes for Virtual Mega Map #17689

Draft
wants to merge 13 commits into
base: 16785-D-virtualmap-keybytes-valueobjects
Choose a base branch
from
Draft
Original file line number Diff line number Diff line change
@@ -1,54 +1,6 @@
// SPDX-License-Identifier: Apache-2.0
package com.hedera.node.app.blocks.impl;

import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_ACCOUNTS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_ALIASES;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_BLOCK_INFO;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_BLOCK_STREAM_INFO;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_CONGESTION_STARTS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_CONTRACT_BYTECODE;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_CONTRACT_STORAGE;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_ENTITY_ID;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_FILES;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_FREEZE_TIME;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_MIDNIGHT_RATES;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_NETWORK_REWARDS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_NFTS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_NODES;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_PENDING_AIRDROPS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_PLATFORM_STATE;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_ROSTERS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_ROSTER_STATE;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_RUNNING_HASHES;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_SCHEDULED_COUNTS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_SCHEDULED_ORDERS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_SCHEDULED_USAGES;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_SCHEDULES_BY_EQUALITY;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_SCHEDULES_BY_EXPIRY;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_SCHEDULES_BY_ID;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_SCHEDULE_ID_BY_EQUALITY;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_STAKING_INFO;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_THROTTLE_USAGE;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_TOKENS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_TOKEN_RELATIONS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_TOPICS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_TRANSACTION_RECEIPTS_QUEUE;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_TSS_ENCRYPTION_KEYS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_TSS_MESSAGES;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_TSS_STATUS;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_TSS_VOTES;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_150;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_151;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_152;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_153;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_154;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_155;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_156;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_157;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_158;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_DATA_159;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_FILE;
import static com.hedera.hapi.block.stream.output.StateIdentifier.STATE_ID_UPGRADE_FILE_HASH;
import static com.hedera.node.app.records.impl.BlockRecordInfoUtils.HASH_SIZE;

import com.hedera.pbj.runtime.io.buffer.Bytes;
Expand All @@ -61,7 +13,6 @@
* Utility methods for block implementation.
*/
public class BlockImplUtils {
private static final int UNKNOWN_STATE_ID = -1;

/**
* Prevent instantiation
Expand All @@ -70,131 +21,6 @@ private BlockImplUtils() {
throw new UnsupportedOperationException("Utility Class");
}

/**
 * Looks up the block-stream state identifier for a given service/state-key pair.
 *
 * <p>The lookup is performed on the composite key {@code serviceName + "." + stateKey}; every
 * known mapping contains exactly one delimiter dot, so the composite form is unambiguous.
 *
 * @param serviceName the service name
 * @param stateKey the state key
 * @return the state id
 * @throws IllegalArgumentException if the service/state-key pair is not a known state
 */
public static int stateIdFor(@NonNull final String serviceName, @NonNull final String stateKey) {
    // Collapse the two-level (service, key) lookup into one switch over the composite key.
    final int id =
            switch (serviceName + "." + stateKey) {
                case "AddressBookService.NODES" -> STATE_ID_NODES.protoOrdinal();
                case "BlockRecordService.BLOCKS" -> STATE_ID_BLOCK_INFO.protoOrdinal();
                case "BlockRecordService.RUNNING_HASHES" -> STATE_ID_RUNNING_HASHES.protoOrdinal();
                case "BlockStreamService.BLOCK_STREAM_INFO" -> STATE_ID_BLOCK_STREAM_INFO.protoOrdinal();
                case "CongestionThrottleService.CONGESTION_LEVEL_STARTS" -> STATE_ID_CONGESTION_STARTS
                        .protoOrdinal();
                case "CongestionThrottleService.THROTTLE_USAGE_SNAPSHOTS" -> STATE_ID_THROTTLE_USAGE
                        .protoOrdinal();
                case "ConsensusService.TOPICS" -> STATE_ID_TOPICS.protoOrdinal();
                case "ContractService.BYTECODE" -> STATE_ID_CONTRACT_BYTECODE.protoOrdinal();
                case "ContractService.STORAGE" -> STATE_ID_CONTRACT_STORAGE.protoOrdinal();
                case "EntityIdService.ENTITY_ID" -> STATE_ID_ENTITY_ID.protoOrdinal();
                case "FeeService.MIDNIGHT_RATES" -> STATE_ID_MIDNIGHT_RATES.protoOrdinal();
                case "FileService.FILES" -> STATE_ID_FILES.protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=150]]" -> STATE_ID_UPGRADE_DATA_150
                        .protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=151]]" -> STATE_ID_UPGRADE_DATA_151
                        .protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=152]]" -> STATE_ID_UPGRADE_DATA_152
                        .protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=153]]" -> STATE_ID_UPGRADE_DATA_153
                        .protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=154]]" -> STATE_ID_UPGRADE_DATA_154
                        .protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=155]]" -> STATE_ID_UPGRADE_DATA_155
                        .protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=156]]" -> STATE_ID_UPGRADE_DATA_156
                        .protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=157]]" -> STATE_ID_UPGRADE_DATA_157
                        .protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=158]]" -> STATE_ID_UPGRADE_DATA_158
                        .protoOrdinal();
                case "FileService.UPGRADE_DATA[FileID[shardNum=0, realmNum=0, fileNum=159]]" -> STATE_ID_UPGRADE_DATA_159
                        .protoOrdinal();
                case "FileService.UPGRADE_FILE" -> STATE_ID_UPGRADE_FILE.protoOrdinal();
                case "FreezeService.FREEZE_TIME" -> STATE_ID_FREEZE_TIME.protoOrdinal();
                case "FreezeService.UPGRADE_FILE_HASH" -> STATE_ID_UPGRADE_FILE_HASH.protoOrdinal();
                case "PlatformStateService.PLATFORM_STATE" -> STATE_ID_PLATFORM_STATE.protoOrdinal();
                case "RecordCache.TransactionReceiptQueue" -> STATE_ID_TRANSACTION_RECEIPTS_QUEUE
                        .protoOrdinal();
                case "RosterService.ROSTERS" -> STATE_ID_ROSTERS.protoOrdinal();
                case "RosterService.ROSTER_STATE" -> STATE_ID_ROSTER_STATE.protoOrdinal();
                case "ScheduleService.SCHEDULES_BY_EQUALITY" -> STATE_ID_SCHEDULES_BY_EQUALITY.protoOrdinal();
                case "ScheduleService.SCHEDULES_BY_EXPIRY_SEC" -> STATE_ID_SCHEDULES_BY_EXPIRY.protoOrdinal();
                case "ScheduleService.SCHEDULES_BY_ID" -> STATE_ID_SCHEDULES_BY_ID.protoOrdinal();
                case "ScheduleService.SCHEDULE_ID_BY_EQUALITY" -> STATE_ID_SCHEDULE_ID_BY_EQUALITY
                        .protoOrdinal();
                case "ScheduleService.SCHEDULED_COUNTS" -> STATE_ID_SCHEDULED_COUNTS.protoOrdinal();
                case "ScheduleService.SCHEDULED_ORDERS" -> STATE_ID_SCHEDULED_ORDERS.protoOrdinal();
                case "ScheduleService.SCHEDULED_USAGES" -> STATE_ID_SCHEDULED_USAGES.protoOrdinal();
                case "TokenService.ACCOUNTS" -> STATE_ID_ACCOUNTS.protoOrdinal();
                case "TokenService.ALIASES" -> STATE_ID_ALIASES.protoOrdinal();
                case "TokenService.NFTS" -> STATE_ID_NFTS.protoOrdinal();
                case "TokenService.PENDING_AIRDROPS" -> STATE_ID_PENDING_AIRDROPS.protoOrdinal();
                case "TokenService.STAKING_INFOS" -> STATE_ID_STAKING_INFO.protoOrdinal();
                case "TokenService.STAKING_NETWORK_REWARDS" -> STATE_ID_NETWORK_REWARDS.protoOrdinal();
                case "TokenService.TOKEN_RELS" -> STATE_ID_TOKEN_RELATIONS.protoOrdinal();
                case "TokenService.TOKENS" -> STATE_ID_TOKENS.protoOrdinal();
                case "TssBaseService.TSS_MESSAGES" -> STATE_ID_TSS_MESSAGES.protoOrdinal();
                case "TssBaseService.TSS_VOTES" -> STATE_ID_TSS_VOTES.protoOrdinal();
                case "TssBaseService.TSS_ENCRYPTION_KEYS" -> STATE_ID_TSS_ENCRYPTION_KEYS.protoOrdinal();
                case "TssBaseService.TSS_STATUS" -> STATE_ID_TSS_STATUS.protoOrdinal();
                default -> UNKNOWN_STATE_ID;
            };
    // Unknown pairs are a caller error, not a soft failure — fail loudly with both halves of the key.
    if (id == UNKNOWN_STATE_ID) {
        throw new IllegalArgumentException("Unknown state '" + serviceName + "." + stateKey + "'");
    }
    return id;
}

/**
* Appends the given hash to the given hashes. If the number of hashes exceeds the given maximum, the oldest hash
* is removed.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
import com.hedera.hapi.platform.state.PlatformState;
import com.hedera.pbj.runtime.OneOf;
import com.swirlds.state.StateChangeListener;
import com.swirlds.state.merkle.StateUtils;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.time.Instant;
Expand Down Expand Up @@ -138,7 +139,7 @@ public Set<StateType> stateTypes() {

@Override
public int stateIdFor(@NonNull final String serviceName, @NonNull final String stateKey) {
return BlockImplUtils.stateIdFor(serviceName, stateKey);
return StateUtils.stateIdFor(serviceName, stateKey);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import com.swirlds.state.lifecycle.StartupNetworks;
import com.swirlds.state.lifecycle.info.NetworkInfo;
import com.swirlds.state.merkle.MerkleStateRoot;
import com.swirlds.state.merkle.NewStateRoot;
import com.swirlds.state.spi.FilteredWritableStates;
import com.swirlds.state.spi.ReadableStates;
import com.swirlds.state.spi.WritableStates;
Expand Down Expand Up @@ -72,11 +73,11 @@ public long newEntityNum() {
@Override
public void copyAndReleaseOnDiskState(@NonNull final String stateKey) {
requireNonNull(stateKey);
if (newStates instanceof MerkleStateRoot.MerkleWritableStates merkleWritableStates) {
if (newStates instanceof NewStateRoot.MerkleWritableStates merkleWritableStates) {
merkleWritableStates.copyAndReleaseVirtualMap(stateKey);
} else if (newStates instanceof FilteredWritableStates filteredWritableStates
&& filteredWritableStates.getDelegate()
instanceof MerkleStateRoot.MerkleWritableStates merkleWritableStates) {
instanceof NewStateRoot.MerkleWritableStates merkleWritableStates) {
merkleWritableStates.copyAndReleaseVirtualMap(stateKey);
} else {
throw new UnsupportedOperationException("On-disk state is inaccessible");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import com.swirlds.state.lifecycle.StartupNetworks;
import com.swirlds.state.lifecycle.info.NetworkInfo;
import com.swirlds.state.merkle.MerkleStateRoot;
import com.swirlds.state.merkle.NewStateRoot;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.HashMap;
Expand Down Expand Up @@ -153,7 +154,7 @@ public List<StateChanges.Builder> doMigrations(
startupNetworks);
// Now commit any changes that were made to the entity ID state (since other service entities could
// depend on newly-generated entity IDs)
if (entityIdWritableStates instanceof MerkleStateRoot.MerkleWritableStates mws) {
if (entityIdWritableStates instanceof NewStateRoot.MerkleWritableStates mws) {
mws.commit();
migrationStateChanges.trackCommit();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
import com.swirlds.state.lifecycle.StateDefinition;
import com.swirlds.state.lifecycle.info.NetworkInfo;
import com.swirlds.state.merkle.MerkleStateRoot;
import com.swirlds.state.merkle.NewStateRoot;
import com.swirlds.state.merkle.StateMetadata;
import com.swirlds.state.merkle.StateUtils;
import com.swirlds.state.merkle.queue.QueueNode;
Expand Down Expand Up @@ -205,8 +206,8 @@ public void migrate(
if (isSoOrdered(currentVersion, previousVersion)) {
throw new IllegalArgumentException("The currentVersion must be at least the previousVersion");
}
if (!(state instanceof MerkleStateRoot stateRoot)) {
throw new IllegalArgumentException("The state must be an instance of " + MerkleStateRoot.class.getName());
if (!(state instanceof NewStateRoot stateRoot)) {
throw new IllegalArgumentException("The state must be an instance of " + NewStateRoot.class.getName());
}
final long roundNumber = PLATFORM_STATE_SERVICE.roundOf(stateRoot);
if (schemas.isEmpty()) {
Expand Down Expand Up @@ -274,7 +275,7 @@ && alreadyIncludesStateDefs(previousVersion, s.getVersion()))
schema.restart(migrationContext);
}
// Now commit all the service-specific changes made during this service's update or migration
if (writableStates instanceof MerkleStateRoot.MerkleWritableStates mws) {
if (writableStates instanceof NewStateRoot.MerkleWritableStates mws) {
mws.commit();
migrationStateChanges.trackCommit();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -319,7 +319,7 @@ private void initServices(Schema schemaV1, MerkleStateRoot<?> loadedTree) {
new HashMap<>(),
migrationStateChanges,
startupNetworks);
loadedTree.migrate(MerkleStateRoot.CURRENT_VERSION);
loadedTree.migrate(CONFIGURATION, MerkleStateRoot.CURRENT_VERSION);
}

private PlatformMerkleStateRoot createMerkleHederaState(Schema schemaV1) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,13 +159,13 @@ public boolean childHasExpectedType(final int index, final long childClassId) {
}

@Override
public MerkleNode migrate(int version) {
public MerkleNode migrate(@NonNull Configuration configuration, int version) {
if (version == ClassVersion.VIRTUAL_MAP) {
FAKE_MERKLE_STATE_LIFECYCLES.initRosterState(this);
return this;
}

return super.migrate(version);
return super.migrate(configuration, version);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,9 @@
import com.swirlds.common.merkle.MerkleInternal;
import com.swirlds.common.merkle.MerkleNode;
import com.swirlds.common.merkle.impl.PartialNaryMerkleInternal;
import com.swirlds.config.api.Configuration;
import edu.umd.cs.findbugs.annotations.NonNull;

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiFunction;

Expand Down Expand Up @@ -133,7 +136,7 @@ public BenchmarkMerkleInternal copy() {
* in position 0, and that leaf will equal this node.
*/
@Override
public MerkleNode migrate(final int version) {
public MerkleNode migrate(@NonNull Configuration configuration, final int version) {
return migrationMapper.apply(this, version);
}
}
Loading
Loading