
Commit

Merge branch 'develop' into 9728-address-account-nonce-discrepancies-mono
natanasow committed Feb 7, 2024
2 parents 11f6393 + 2258784 commit dbb8b4e
Showing 152 changed files with 3,079 additions and 4,331 deletions.
2 changes: 1 addition & 1 deletion hedera-dependency-versions/build.gradle.kts
@@ -55,7 +55,7 @@ moduleInfo {
version("com.google.jimfs", "1.2")
version("com.google.protobuf", protobufVersion)
version("com.google.protobuf.util", protobufVersion)
version("com.hedera.pbj.runtime", "0.7.14")
version("com.hedera.pbj.runtime", "0.7.19")
version("com.squareup.javapoet", "1.13.0")
version("com.sun.jna", "5.12.1")
version("dagger", daggerVersion)
7 changes: 0 additions & 7 deletions hedera-node/config.txt
@@ -3,7 +3,6 @@
###############################################################################################
swirld, 123

# app, HashgraphDemo.jar, 1,0,0,0,0,0,0,0,0,0, all
# app, GameDemo.jar, 9000, 9000
# app, HelloSwirldDemo.jar
# app, CryptocurrencyDemo.jar
@@ -72,12 +71,6 @@ nextNodeId, 1
#
# FilesystemDemo.jar parameters: none
#
# HashGraphDemo.jar takes parameters that give the initial checkbox settings,
# in the same order they appear are on the screen, with 1 to check it
# and 0 to not check it, followed by the number of events to display
# (or “all”). The first parameter controls whether it runs
# slowly (1) or runs at full speed (0).
#
# GameDemo.jar parameters:
# height: height of the board (in cells). Player moves 1 cell at a time.
# width: width of the board (in cells). Player moves 1 cell at a time.
@@ -31,7 +31,6 @@
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Predicate;

/** Provides helpers to compare and analyze record streams. */
@@ -50,7 +49,7 @@ private OrderedComparison() {
* @param firstStreamDir the first record stream
* @param secondStreamDir the second record stream
* @param recordDiffSummarizer if present, a summarizer for record diffs
* @param fileNameObserver if set, a consumer receiving the name of each file as it is parsed
* @param maybeInclusionTest if set, a predicate selecting which record files to parse
     * @param maybeInclusionDescription if set, a description of the included files, used only in log output
* @return the stream diff
* @throws IOException if any of the record stream files cannot be read or parsed
* @throws IllegalArgumentException if the directories contain misaligned record streams
@@ -59,22 +58,38 @@ public static List<DifferingEntries> findDifferencesBetweenV6(
@NonNull final String firstStreamDir,
@NonNull final String secondStreamDir,
@Nullable final RecordDiffSummarizer recordDiffSummarizer,
@Nullable final Consumer<String> fileNameObserver)
@Nullable final Predicate<String> maybeInclusionTest,
@Nullable final String maybeInclusionDescription)
throws IOException {
final Predicate<String> watchingPredicate = f -> {
if (fileNameObserver != null) {
fileNameObserver.accept(f);
}
return true;
};
System.out.println("Parsing stream @ " + firstStreamDir);
final var firstEntries = parseV6RecordStreamEntriesIn(firstStreamDir, watchingPredicate);
final Predicate<String> inclusionTest = maybeInclusionTest == null ? f -> true : maybeInclusionTest;
final String inclusionDescription = maybeInclusionDescription == null ? "all" : maybeInclusionDescription;
System.out.println("Parsing stream @ " + firstStreamDir + "(including " + inclusionDescription + ")");
final var firstEntries = parseV6RecordStreamEntriesIn(firstStreamDir, inclusionTest);
System.out.println(" ➡️ Read " + firstEntries.size() + " entries");
System.out.println("Parsing stream @ " + secondStreamDir);
final var secondEntries = parseV6RecordStreamEntriesIn(secondStreamDir, watchingPredicate);
System.out.println("Parsing stream @ " + secondStreamDir + "(including " + inclusionDescription + ")");
final var secondEntries = parseV6RecordStreamEntriesIn(secondStreamDir, inclusionTest);
List<RecordStreamEntry> newSecondEntries = getNewSecondRecordStreamEntries(firstEntries, secondEntries);
System.out.println(" ➡️ Read " + secondEntries.size() + " entries");
// FUTURE: Add a step to align consensus times in the two streams when any record is missing
return diff(firstEntries, secondEntries, recordDiffSummarizer);
return diff(firstEntries, newSecondEntries, recordDiffSummarizer);
}

/**
 * Walks the first stream's entries in order and returns a copy of the second stream aligned to
 * them: wherever the second stream has no record at a consensus time present in the first
 * stream, a placeholder entry with a null record is inserted so diff() can report it.
 */
@NonNull
private static List<RecordStreamEntry> getNewSecondRecordStreamEntries(
        List<RecordStreamEntry> firstEntries, List<RecordStreamEntry> secondEntries) {
List<RecordStreamEntry> ret = new ArrayList<>();
RecordStreamEntry firstEntry, secondEntry;
int secondIndex = 0;
for (RecordStreamEntry entry : firstEntries) {
firstEntry = entry;
secondEntry = secondEntries.get(secondIndex);
if (secondEntry.consensusTime().equals(firstEntry.consensusTime())) {
ret.add(secondEntry);
secondIndex++;
} else {
ret.add(new RecordStreamEntry(null, null, firstEntry.consensusTime()));
}
}
return ret;
}

public interface RecordDiffSummarizer extends BiFunction<TransactionRecord, TransactionRecord, String> {}
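For reference, a sketch of how a caller might use the new parameters. The helper below is hypothetical (class name, method name, and the file-name prefix are illustrative assumptions); only findDifferencesBetweenV6 and its five parameters come from this change, and the helper is assumed to sit in the same package as OrderedComparison.

import java.io.IOException;
import java.util.List;
import java.util.function.Predicate;

class OneDayComparison {
    static List<DifferingEntries> compareOneDay(final String firstDir, final String secondDir) throws IOException {
        // Only parse record files whose names start with an assumed date prefix
        final Predicate<String> inclusionTest = f -> f.startsWith("2024-02-07");
        return OrderedComparison.findDifferencesBetweenV6(
                firstDir,
                secondDir,
                null,                // no RecordDiffSummarizer; plain record inequality is enough here
                inclusionTest,       // passing null instead would include every file
                "2024-02-07 files"); // passing null instead would log the default description "all"
    }
}

Passing null for both new arguments, as the updated test further below does, keeps the old behavior of parsing every file in each directory.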
@@ -97,6 +112,14 @@ static List<DifferingEntries> diff(
for (int i = 0; i < minSize; i++) {
final var firstEntry = firstEntries.get(i);
try {
if (secondEntries.get(i).txnRecord() == null) {
diffs.add(new DifferingEntries(
firstEntry,
null,
"No record found at " + firstEntry.consensusTime() + " for transactionID : "
+ firstEntry.txnRecord().getTransactionID()));
continue;
}
final var secondEntry = entryWithMatchableRecord(secondEntries, i, firstEntry);
if (!firstEntry.txnRecord().equals(secondEntry.txnRecord())) {
final var summary = recordDiffSummarizer == null
@@ -154,9 +177,6 @@ private static RecordStreamEntry entryWithMatchableRecord(
@NonNull final List<RecordStreamEntry> entries, final int i, @NonNull final RecordStreamEntry entryToMatch)
throws UnmatchableException {
final var secondEntry = entries.get(i);
if (secondEntry == null) {
throw new UnmatchableException("No matching entry found for entry at position " + i);
}
if (!entryToMatch.consensusTime().equals(secondEntry.consensusTime())) {
throw new UnmatchableException("Entries at position "
+ i
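To make the new alignment step concrete, here is a self-contained simplification. Entry stands in for RecordStreamEntry (an Instant consensus time plus a possibly-null payload); the align method mirrors getNewSecondRecordStreamEntries, with an extra bounds check, and the null payload is what the updated diff() reports as "No record found at <consensusTime>". This is an illustration of the idea only, not the project's classes.

import java.time.Instant;
import java.util.ArrayList;
import java.util.List;

class StreamAlignmentSketch {
    // Stand-in for RecordStreamEntry: a consensus time plus a (possibly null) record payload
    record Entry(Instant consensusTime, String payload) {}

    // Walk the first stream; wherever the second stream has no entry at that consensus time,
    // insert a placeholder with a null payload so the two lists stay index-aligned
    static List<Entry> align(final List<Entry> first, final List<Entry> second) {
        final List<Entry> aligned = new ArrayList<>();
        int j = 0;
        for (final Entry f : first) {
            if (j < second.size() && second.get(j).consensusTime().equals(f.consensusTime())) {
                aligned.add(second.get(j++));
            } else {
                aligned.add(new Entry(f.consensusTime(), null)); // "missing record" placeholder
            }
        }
        return aligned;
    }

    public static void main(String[] args) {
        final Instant t0 = Instant.parse("2024-02-07T00:00:00Z");
        final List<Entry> first = List.of(
                new Entry(t0, "A"), new Entry(t0.plusSeconds(1), "B"), new Entry(t0.plusSeconds(2), "C"));
        final List<Entry> second = List.of(new Entry(t0, "A"), new Entry(t0.plusSeconds(2), "C"));
        // Prints A, then the placeholder with a null payload, then C
        align(first, second).forEach(System.out::println);
    }
}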
@@ -58,7 +58,7 @@ void detectsDifferenceInCaseOfObviouslyWrongNonce() throws IOException {
final var issStreamLoc = WRONG_NONCE_STREAMS_DIR + File.separator + "node5";
final var consensusStreamLoc = WRONG_NONCE_STREAMS_DIR + File.separator + "node0";

final var diffs = findDifferencesBetweenV6(issStreamLoc, consensusStreamLoc, null, null);
final var diffs = findDifferencesBetweenV6(issStreamLoc, consensusStreamLoc, null, null, null);
assertEquals(1, diffs.size());
final var soleDiff = diffs.get(0);
final var issEntry = soleDiff.firstEntry();
2 changes: 1 addition & 1 deletion hedera-node/hapi/build.gradle.kts
@@ -25,7 +25,7 @@ description = "Hedera API"

// Add downloaded HAPI repo protobuf files into build directory and add to sources to build them
tasks.cloneHederaProtobufs {
branchOrTag = "use-ContractID-in-SlotKey"
branchOrTag = "add-pbj-types-for-state"
// As long as the 'branchOrTag' above is not stable, run always:
outputs.upToDateWhen { false }
}
@@ -39,6 +39,7 @@
import static com.hedera.node.app.state.merkle.MerkleSchemaRegistry.isSoOrdered;
import static com.hedera.node.app.throttle.ThrottleAccumulator.ThrottleType.BACKEND_THROTTLE;
import static com.hedera.node.app.throttle.ThrottleAccumulator.ThrottleType.FRONTEND_THROTTLE;
import static com.hedera.node.app.util.FileUtilities.observePropertiesAndPermissions;
import static com.hedera.node.app.util.HederaAsciiArt.HEDERA;
import static com.swirlds.platform.system.InitTrigger.EVENT_STREAM_RECOVERY;
import static com.swirlds.platform.system.InitTrigger.GENESIS;
@@ -1074,7 +1075,6 @@ private void initializeForTrigger(
// the various migration methods may depend on configuration to do their work
logger.info("Initializing Reconnect configuration");
this.configProvider = new ConfigProviderImpl(false);
logConfiguration();

logger.info("Initializing ThrottleManager");
this.throttleManager = new ThrottleManager();
@@ -1107,7 +1107,8 @@
initializeExchangeRateManager(state);
initializeFeeManager(state);
initializeThrottles(state);
// TODO We may need to update the config with the latest version in file 121
observePropertiesAndPermissions(state, configProvider.getConfiguration(), configProvider::update);
logConfiguration();
}

/*==================================================================================================================
@@ -16,17 +16,23 @@

package com.hedera.node.app.bbm;

import static com.hedera.node.app.bbm.associations.TokenAssociationsDumpUtils.dumpModTokenRelations;
import static com.hedera.node.app.bbm.associations.TokenAssociationsDumpUtils.dumpMonoTokenRelations;
import static com.hedera.node.app.bbm.nfts.UniqueTokenDumpUtils.dumpModUniqueTokens;
import static com.hedera.node.app.bbm.nfts.UniqueTokenDumpUtils.dumpMonoUniqueTokens;
import static com.hedera.node.app.records.BlockRecordService.BLOCK_INFO_STATE_KEY;
import static com.hedera.node.app.service.mono.state.migration.StateChildIndices.NETWORK_CTX;
import static com.hedera.node.app.service.mono.state.migration.StateChildIndices.TOKEN_ASSOCIATIONS;
import static com.hedera.node.app.service.mono.state.migration.StateChildIndices.UNIQUE_TOKENS;
import static com.hedera.node.app.service.token.impl.TokenServiceImpl.NFTS_KEY;
import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKEN_RELS_KEY;
import static java.util.Objects.requireNonNull;

import com.hedera.hapi.node.base.NftID;
import com.hedera.hapi.node.base.TokenAssociation;
import com.hedera.hapi.node.state.blockrecords.BlockInfo;
import com.hedera.hapi.node.state.token.Nft;
import com.hedera.hapi.node.state.token.TokenRelation;
import com.hedera.node.app.records.BlockRecordService;
import com.hedera.node.app.service.mono.state.merkle.MerkleNetworkContext;
import com.hedera.node.app.service.token.TokenService;
@@ -47,12 +53,15 @@
*/
public class StateDumper {
private static final String SEMANTIC_UNIQUE_TOKENS = "uniqueTokens.txt";
private static final String SEMANTIC_TOKEN_RELATIONS = "tokenRelations.txt";

public static void dumpMonoChildrenFrom(
@NonNull final MerkleHederaState state, @NonNull final DumpCheckpoint checkpoint) {
final MerkleNetworkContext networkContext = state.getChild(NETWORK_CTX);
final var dumpLoc = getExtantDumpLoc("mono", networkContext.consensusTimeOfLastHandledTxn());
dumpMonoUniqueTokens(Paths.get(dumpLoc, SEMANTIC_UNIQUE_TOKENS), state.getChild(UNIQUE_TOKENS), checkpoint);
dumpMonoTokenRelations(
Paths.get(dumpLoc, SEMANTIC_TOKEN_RELATIONS), state.getChild(TOKEN_ASSOCIATIONS), checkpoint);
}

public static void dumpModChildrenFrom(
Expand All @@ -68,6 +77,9 @@ public static void dumpModChildrenFrom(
final VirtualMap<OnDiskKey<NftID>, OnDiskValue<Nft>> uniqueTokens =
requireNonNull(state.getChild(state.findNodeIndex(TokenService.NAME, NFTS_KEY)));
dumpModUniqueTokens(Paths.get(dumpLoc, SEMANTIC_UNIQUE_TOKENS), uniqueTokens, checkpoint);
final VirtualMap<OnDiskKey<TokenAssociation>, OnDiskValue<TokenRelation>> tokenRelations =
requireNonNull(state.getChild(state.findNodeIndex(TokenService.NAME, TOKEN_RELS_KEY)));
dumpModTokenRelations(Paths.get(dumpLoc, SEMANTIC_TOKEN_RELATIONS), tokenRelations, checkpoint);
}

private static String getExtantDumpLoc(
@@ -0,0 +1,90 @@
/*
* Copyright (C) 2024 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.node.app.bbm.associations;

import com.hedera.hapi.node.state.token.TokenRelation;
import com.hedera.node.app.service.mono.state.submerkle.EntityId;
import com.hedera.node.app.service.mono.state.virtual.entities.OnDiskTokenRel;
import com.hedera.node.app.service.mono.utils.EntityNumPair;
import com.hedera.node.app.state.merkle.disk.OnDiskValue;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.TokenID;
import com.swirlds.base.utility.Pair;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;

record TokenAssociation(
EntityId accountId,
EntityId tokenId,
long balance,
boolean isFrozen,
boolean isKycGranted,
boolean isAutomaticAssociation,
EntityId prev,
EntityId next) {

@NonNull
static TokenAssociation fromMono(@NonNull final OnDiskTokenRel tokenRel) {
final var at = toLongsPair(toPair(tokenRel.getKey()));

return new TokenAssociation(
entityIdFrom(at.left()),
entityIdFrom(at.right()),
tokenRel.getBalance(),
tokenRel.isFrozen(),
tokenRel.isKycGranted(),
tokenRel.isAutomaticAssociation(),
entityIdFrom(tokenRel.getPrev()),
entityIdFrom(tokenRel.getNext()));
}

static TokenAssociation fromMod(@NonNull final OnDiskValue<TokenRelation> wrapper) {
final var value = wrapper.getValue();
return new TokenAssociation(
accountIdFromMod(value.accountId()),
tokenIdFromMod(value.tokenId()),
value.balance(),
value.frozen(),
value.kycGranted(),
value.automaticAssociation(),
tokenIdFromMod(value.previousToken()),
tokenIdFromMod(value.nextToken()));
}

@NonNull
static Pair<AccountID, TokenID> toPair(@NonNull final EntityNumPair enp) {
final var at = enp.asAccountTokenRel();
return Pair.of(at.getLeft(), at.getRight());
}

@NonNull
static Pair<Long, Long> toLongsPair(@NonNull final Pair<AccountID, TokenID> pat) {
return Pair.of(pat.left().getAccountNum(), pat.right().getTokenNum());
}

static EntityId accountIdFromMod(@Nullable final com.hedera.hapi.node.base.AccountID accountId) {
return null == accountId ? EntityId.MISSING_ENTITY_ID : new EntityId(0L, 0L, accountId.accountNumOrThrow());
}

static EntityId tokenIdFromMod(@Nullable final com.hedera.hapi.node.base.TokenID tokenId) {
return null == tokenId ? EntityId.MISSING_ENTITY_ID : new EntityId(0L, 0L, tokenId.tokenNum());
}

static EntityId entityIdFrom(long num) {
return new EntityId(0L, 0L, num);
}
}
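Both factories normalize mono and mod state into the same record, so a natural way to compare the two dumps is to key each side by TokenAssociationId (defined in the next file) and diff the maps. The helper below is a hypothetical sketch of that comparison, not the dump utilities' actual code, and it assumes it lives in com.hedera.node.app.bbm.associations so the package-private records are visible.

import java.util.Map;
import java.util.TreeMap;

class TokenAssociationComparison {
    static void reportDifferences(
            final Map<TokenAssociationId, TokenAssociation> fromMono,
            final Map<TokenAssociationId, TokenAssociation> fromMod) {
        // TreeMap iterates in (accountId, tokenId) order via TokenAssociationId.compareTo
        new TreeMap<>(fromMono).forEach((id, monoAssociation) -> {
            final TokenAssociation modAssociation = fromMod.get(id);
            if (modAssociation == null) {
                System.out.println(id + " present only in mono state");
            } else if (!modAssociation.equals(monoAssociation)) {
                System.out.println(id + " differs: mono=" + monoAssociation + " mod=" + modAssociation);
            }
        });
        fromMod.keySet().stream()
                .filter(id -> !fromMono.containsKey(id))
                .forEach(id -> System.out.println(id + " present only in mod state"));
    }
}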
@@ -0,0 +1,52 @@
/*
* Copyright (C) 2024 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.node.app.bbm.associations;

import static com.hedera.node.app.bbm.associations.TokenAssociation.toLongsPair;
import static com.hedera.node.app.bbm.associations.TokenAssociation.toPair;

import com.google.common.collect.ComparisonChain;
import com.hedera.node.app.bbm.utils.Writer;
import com.hedera.node.app.service.mono.state.virtual.entities.OnDiskTokenRel;
import com.hedera.node.app.state.merkle.disk.OnDiskKey;
import edu.umd.cs.findbugs.annotations.NonNull;

record TokenAssociationId(long accountId, long tokenId) implements Comparable<TokenAssociationId> {
static TokenAssociationId fromMod(@NonNull final com.hedera.hapi.node.base.TokenAssociation association) {
return new TokenAssociationId(
association.accountId().accountNum(), association.tokenId().tokenNum());
}

static TokenAssociationId fromMono(@NonNull final OnDiskKey<OnDiskTokenRel> tokenRel) {
final var key = toLongsPair(toPair(tokenRel.getKey().getKey()));
return new TokenAssociationId(key.left(), key.right());
}

@Override
public String toString() {
return "%d%s%d".formatted(accountId, Writer.FIELD_SEPARATOR, tokenId);
}

@Override
public int compareTo(TokenAssociationId o) {
return ComparisonChain.start()
.compare(this.accountId, o.accountId)
.compare(this.tokenId, o.tokenId)
.result();
}
}
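The ComparisonChain above orders ids by account number first and token number second. A quick self-contained check of that ordering (the id values are arbitrary, and the class is assumed to sit in the same package so the package-private record is visible):

import java.util.List;

class TokenAssociationIdOrderingCheck {
    public static void main(String[] args) {
        final List<TokenAssociationId> sorted = List.of(
                        new TokenAssociationId(1002L, 2001L),
                        new TokenAssociationId(1001L, 2002L),
                        new TokenAssociationId(1001L, 2001L))
                .stream()
                .sorted()
                .toList();
        // Prints both ids for account 1001 (token 2001 before 2002) ahead of the id for account 1002
        sorted.forEach(System.out::println);
    }
}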
