diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/StateDumper.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/StateDumper.java index 2550497b22b6..7027b37a363a 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/StateDumper.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/StateDumper.java @@ -16,6 +16,8 @@ package com.hedera.node.app.bbm; +import static com.hedera.node.app.bbm.accounts.AccountDumpUtils.dumpModAccounts; +import static com.hedera.node.app.bbm.accounts.AccountDumpUtils.dumpMonoAccounts; import static com.hedera.node.app.bbm.associations.TokenAssociationsDumpUtils.dumpModTokenRelations; import static com.hedera.node.app.bbm.associations.TokenAssociationsDumpUtils.dumpMonoTokenRelations; import static com.hedera.node.app.bbm.files.FilesDumpUtils.dumpModFiles; @@ -23,18 +25,22 @@ import static com.hedera.node.app.bbm.nfts.UniqueTokenDumpUtils.dumpModUniqueTokens; import static com.hedera.node.app.bbm.nfts.UniqueTokenDumpUtils.dumpMonoUniqueTokens; import static com.hedera.node.app.records.BlockRecordService.BLOCK_INFO_STATE_KEY; +import static com.hedera.node.app.service.mono.state.migration.StateChildIndices.ACCOUNTS; import static com.hedera.node.app.service.mono.state.migration.StateChildIndices.NETWORK_CTX; import static com.hedera.node.app.service.mono.state.migration.StateChildIndices.STORAGE; import static com.hedera.node.app.service.mono.state.migration.StateChildIndices.TOKEN_ASSOCIATIONS; import static com.hedera.node.app.service.mono.state.migration.StateChildIndices.UNIQUE_TOKENS; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ACCOUNTS_KEY; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.NFTS_KEY; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKEN_RELS_KEY; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.FileID; import com.hedera.hapi.node.base.NftID; import com.hedera.hapi.node.base.TokenAssociation; import com.hedera.hapi.node.state.blockrecords.BlockInfo; +import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Nft; import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.node.app.records.BlockRecordService; @@ -61,6 +67,7 @@ public class StateDumper { private static final String SEMANTIC_UNIQUE_TOKENS = "uniqueTokens.txt"; private static final String SEMANTIC_TOKEN_RELATIONS = "tokenRelations.txt"; private static final String SEMANTIC_FILES = "files.txt"; + private static final String SEMANTIC_ACCOUNTS = "accounts.txt"; public static void dumpMonoChildrenFrom( @NonNull final MerkleHederaState state, @NonNull final DumpCheckpoint checkpoint) { @@ -70,6 +77,7 @@ public static void dumpMonoChildrenFrom( dumpMonoTokenRelations( Paths.get(dumpLoc, SEMANTIC_TOKEN_RELATIONS), state.getChild(TOKEN_ASSOCIATIONS), checkpoint); dumpMonoFiles(Paths.get(dumpLoc, SEMANTIC_FILES), state.getChild(STORAGE), checkpoint); + dumpMonoAccounts(Paths.get(dumpLoc, SEMANTIC_TOKEN_RELATIONS), state.getChild(ACCOUNTS), checkpoint); } public static void dumpModChildrenFrom( @@ -94,6 +102,10 @@ public static void dumpModChildrenFrom( final VirtualMap, OnDiskValue> files = requireNonNull(state.getChild(state.findNodeIndex(FileService.NAME, FileServiceImpl.BLOBS_KEY))); dumpModFiles(Paths.get(dumpLoc, SEMANTIC_FILES), files, checkpoint); + + final VirtualMap, OnDiskValue> accounts = + 
requireNonNull(state.getChild(state.findNodeIndex(TokenService.NAME, ACCOUNTS_KEY))); + dumpModAccounts(Paths.get(dumpLoc, SEMANTIC_ACCOUNTS), accounts, checkpoint); } private static String getExtantDumpLoc( diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/accounts/AccountDumpUtils.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/accounts/AccountDumpUtils.java new file mode 100644 index 000000000000..7afbc858010a --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/accounts/AccountDumpUtils.java @@ -0,0 +1,488 @@ +/* + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.bbm.accounts; + +import static com.swirlds.common.threading.manager.AdHocThreadManager.getStaticThreadManager; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.node.app.bbm.DumpCheckpoint; +import com.hedera.node.app.bbm.utils.ThingsToStrings; +import com.hedera.node.app.bbm.utils.Writer; +import com.hedera.node.app.service.mono.state.adapters.VirtualMapLike; +import com.hedera.node.app.service.mono.state.virtual.EntityNumVirtualKey; +import com.hedera.node.app.service.mono.state.virtual.entities.OnDiskAccount; +import com.hedera.node.app.state.merkle.disk.OnDiskKey; +import com.hedera.node.app.state.merkle.disk.OnDiskValue; +import com.swirlds.virtualmap.VirtualKey; +import com.swirlds.virtualmap.VirtualMap; +import com.swirlds.virtualmap.VirtualValue; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiPredicate; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.function.Supplier; +import java.util.stream.Collector; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.lang3.tuple.Pair; + +public class AccountDumpUtils { + + /** String that separates all fields in the CSV format, and also the primitive-typed fields from each other and + * the other-typed fields from each other in the compressed format. + */ + private static final String FIELD_SEPARATOR = ";"; + + /** String that separates sub-fields (the primitive-type fields) in the compressed format. */ + private static final String SUBFIELD_SEPARATOR = ","; + + /** String that separates field names from field values in the compressed format */ + private static final String NAME_TO_VALUE_SEPARATOR = ":"; + + /** A field joiner that joins _subfields_ with `,` (i.e., _not_ the CSV field separator) and wraps the entire + * thing in parentheses. 
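+ * e.g., joining the subfields {@code N:7} and {@code SID:3} yields {@code (N:7,SID:3)}.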
*/ + private static final Collector parenWrappingJoiner = + Collectors.joining(SUBFIELD_SEPARATOR, "(", ")"); + + private AccountDumpUtils() { + // Utility class + } + + public static void dumpMonoAccounts( + @NonNull final Path path, + @NonNull final VirtualMap accounts, + @NonNull final DumpCheckpoint checkpoint) { + + try (@NonNull final var writer = new Writer(path)) { + HederaAccount[] dumpableAccounts = gatherAccounts(accounts, HederaAccount::fromMono); + reportOnAccounts(writer, dumpableAccounts); + System.out.printf( + "=== mod accounts report is %d bytes at checkpoint %s%n", writer.getSize(), checkpoint.name()); + } + } + + public static void dumpModAccounts( + @NonNull final Path path, + @NonNull final VirtualMap, OnDiskValue> accounts, + @NonNull final DumpCheckpoint checkpoint) { + try (@NonNull final var writer = new Writer(path)) { + HederaAccount[] dumpableAccounts = gatherAccounts(accounts, HederaAccount::fromMod); + reportOnAccounts(writer, dumpableAccounts); + System.out.printf( + "=== mod accounts report is %d bytes at checkpoint %s%n", writer.getSize(), checkpoint.name()); + } + } + + @NonNull + private static HederaAccount[] gatherAccounts( + @NonNull VirtualMap accounts, @NonNull Function mapper) { + final var accountsToReturn = new ConcurrentLinkedQueue(); + final var threadCount = 8; + final var processed = new AtomicInteger(); + + try { + VirtualMapLike.from(accounts) + .extractVirtualMapData( + getStaticThreadManager(), + p -> { + processed.incrementAndGet(); + accountsToReturn.add(mapper.apply(p.right())); + }, + threadCount); + } catch (final InterruptedException ex) { + System.err.println("*** Traversal of accounts virtual map interrupted!"); + Thread.currentThread().interrupt(); + } + + final var accountsArr = accountsToReturn.toArray(new HederaAccount[0]); + Arrays.parallelSort( + accountsArr, Comparator.comparingLong(a -> a.accountId().accountNum())); + System.out.printf("=== %d accounts iterated over (%d saved)%n", processed.get(), accountsArr.length); + + return accountsArr; + } + + private static void reportOnAccounts(@NonNull final Writer writer, @NonNull final HederaAccount[] accountsArr) { + writer.write("account#"); + writer.write(FIELD_SEPARATOR); + writer.write(formatCsvHeader(allFieldNamesInOrder())); + writer.newLine(); + + final var sb = new StringBuilder(); + Arrays.stream(accountsArr).map(a -> formatAccount(sb, a)).forEachOrdered(s -> { + writer.write(s); + writer.newLine(); + }); + } + + /** Produces the CSV header line: A CSV line from all the field names in the deterministic order. */ + @NonNull + private static String formatCsvHeader(@NonNull final List names) { + return String.join(FIELD_SEPARATOR, names); + } + + /** Returns the list of _all_ field names in the deterministic order, expanding the abbreviations to the full + * field name. 
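+ * e.g., the abbreviation {@code #A} appears in the header as {@code numAssociations}, per {@code fieldNameMap}.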
+ */ + @NonNull + private static List allFieldNamesInOrder() { + final var r = new ArrayList(50); + r.addAll(getFieldNamesInOrder(booleanFieldsMapping)); + r.addAll(getFieldNamesInOrder(intFieldsMapping)); + r.addAll(getFieldNamesInOrder(longFieldsMapping)); + r.addAll(getFieldNamesInOrder(getFieldAccessors(new StringBuilder(), HederaAccount.DUMMY_ACCOUNT), false)); + return r.stream().map(s -> fieldNameMap.getOrDefault(s, s)).toList(); + } + + /** Given one of the primitive-type mappings above, extract the field names, and sort them */ + private static > List getFieldNamesInOrder( + @NonNull List>> mapping) { + return mapping.stream().map(Pair::getLeft).sorted().toList(); + } + + /** Given the field mappings above, extract the field names, and sort them */ + // (Overload needed because of type erasure; ugly but seemed to me less ugly than an alternate name, YMMV) + @SuppressWarnings("java:S1172") // "remove unused method parameter 'ignored'" - nope, needed as described aboved + private static List getFieldNamesInOrder(@NonNull final List> fields, final boolean ignored) { + return fields.stream().map(Field::name).sorted().toList(); + } + + /** Formats an entire account as a text string. First field of the string is the account number, followed by all + * of its fields. + */ + @NonNull + private static String formatAccount(@NonNull final StringBuilder sb, @NonNull final HederaAccount a) { + sb.setLength(0); + sb.append(a.accountId().accountNum()); + formatAccountBooleans(sb, a, "bools"); + formatAccountInts(sb, a, "ints"); + formatAccountLongs(sb, a, "longs"); + formatAccountOtherFields(sb, a); + return sb.toString(); + } + + /** Formats all the `boolean`-valued fields of an account, using the mapping `booleanFieldsMapping`. */ + private static void formatAccountBooleans( + @NonNull final StringBuilder sb, @NonNull final HederaAccount a, @NonNull final String name) { + formatAccountFieldsForDifferentOutputFormats( + sb, a, name, booleanFieldsMapping, false, b -> !b, AccountDumpUtils::tagOnlyFieldFormatter); + } + + /** A field formatter that only emits the _name_ of the field. Used for boolean fields in compressed format. */ + @NonNull + private static String tagOnlyFieldFormatter(@NonNull final Pair p) { + return p.getLeft(); + } + + /** Formats all the `int`-valued fields of an account, using the mapping `intFieldsMapping`. */ + private static void formatAccountInts( + @NonNull final StringBuilder sb, @NonNull final HederaAccount a, @NonNull final String name) { + formatAccountFieldsForDifferentOutputFormats( + sb, a, name, intFieldsMapping, 0, n -> n == 0, AccountDumpUtils::taggedFieldFormatter); + } + + /** Formats all the `long`-valued fields of an account, using the mapping `longFieldsMapping`. */ + private static void formatAccountLongs( + @NonNull final StringBuilder sb, @NonNull final HederaAccount a, @NonNull final String name) { + formatAccountFieldsForDifferentOutputFormats( + sb, a, name, longFieldsMapping, 0L, n -> n == 0L, AccountDumpUtils::taggedFieldFormatter); + } + + /** Exceptions coming out of lambdas need to be swallowed. This is ok because the cause is always a missing field + * that should not have been accessed, and the check for that is always made by the caller: The caller sees if + * anything got added to the accumulating stringbuffer, or not. 
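+ * e.g., the {@code HNSN} extractor dereferences {@code headNftId()} and throws for an account with no head NFT, so the caller simply gets the supplied {@code missingValue} back.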
+ */ + @NonNull + private static R applySwallowingExceptions( + @NonNull final Function fn, @NonNull final HederaAccount a, @NonNull R missingValue) { + try { + return fn.apply(a); + } catch (final RuntimeException ex) { + return missingValue; + } + } + + /** Given a mapping from field names to both a field extraction function (extract from an account) and a field + * formatter (type-specific), produce the formatted form of all the fields given in the mapping. Can do either of + * the `Format`s: CSV or compressed fields. + */ + private static void formatAccountOtherFields(@NonNull final StringBuilder sb, @NonNull HederaAccount a) { + final var fieldAccessors = getFieldAccessors(sb, a); + for (final var fieldAccessor : fieldAccessors) { + final var l = sb.length(); + formatFieldSep(sb, fieldAccessor.name()); + if (!fieldAccessor.apply()) { + sb.setLength(l); + applySwallowingExceptions(fieldAccessor); + } + } + } + + private static boolean applySwallowingExceptions(@NonNull final Field field) { + try { + return field.apply(); + } catch (final RuntimeException ex) { + return false; + } + } + + /** A mapping for all account fields that are _not_ of primitive type. Takes the field name to a `Field`, which + * holds the field name, the field extractor ,and the field formatter. And it _is_ a "mapping" even though it isn't + * actually a `Map` data structure like the other mappings for primitive typed fields. */ + @SuppressWarnings({"java:S1452", "java:S2681"}) + // 1452: generic wildcard types should not be used in return types - yes, but this is a collection of `Field`s + // of unrelated types, yet `Object` is not appropriate either + // 2681: a complaint about no braces around `then` clause - yep, intentional, and correct + // spotless:off + private static List> getFieldAccessors(@NonNull final StringBuilder sb, @NonNull final HederaAccount a) { + return Stream.of( + Field.of("1stContractStorageKey", a::firstContractStorageKey, + doWithBuilder(sb, ThingsToStrings::getMaybeStringifyByteString)), + Field.of("accountKey", a::key, doWithBuilder(sb, ThingsToStrings::toStringOfKey)), + Field.of("alias", a::alias, doWithBuilder(sb, ThingsToStrings::getMaybeStringifyByteString)), + Field.of("approveForAllNfts", a::approveForAllNftAllowances, + doWithBuilder(sb, ThingsToStrings::toStringOfApprovalForAllAllowances)), + Field.of("autoRenewAccount", a::autoRenewAccountId, + doWithBuilder(sb, ThingsToStrings::toStringOfAccountId)), + Field.of("cryptoAllowances", a::cryptoAllowances, + doWithBuilder(sb, ThingsToStrings::toStringOfAccountCryptoAllowances)), + Field.of("firstUint256Key", a::getFirstUint256Key, doWithBuilder(sb, ThingsToStrings::toStringOfIntArray)), + Field.of("fungibleTokenAllowances", a::tokenAllowances, + doWithBuilder(sb, ThingsToStrings::toStringOfAccountFungibleTokenAllowances)), + Field.of("headNftKey", a::getHeadNftKey, doWithBuilder(sb, ThingsToStrings::toStringOfEntityNumPair)), + Field.of("latestAssociation", a::getLatestAssociation, + doWithBuilder(sb, ThingsToStrings::toStringOfEntityNumPair)), + Field.of("memo", a::memo, s -> { + if (s.isEmpty()) { + return false; + } + sb.append(ThingsToStrings.quoteForCsv(FIELD_SEPARATOR, s)); + return true; + }), + Field.of("proxy", a::getProxy, doWithBuilder(sb, ThingsToStrings::toStringOfAccountId)) + ).sorted(Comparator.comparing(Field::name)).toList(); + } + // spotless:on + + /** Apply a formatter, given a `StringBuilder` and return whether (or not) the field _existed_ and should be + * emitted. 
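+ * e.g., {@code doWithBuilder(sb, ThingsToStrings::toStringOfAccountId)} turns that two-argument formatter into a {@code Predicate} over just the {@code AccountID}, with {@code sb} captured.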
*/ + private static Predicate doWithBuilder( + @NonNull final StringBuilder sb, @NonNull final BiPredicate bifn) { + return t -> bifn.test(sb, t); + } + + record Field(@NonNull String name, @NonNull Supplier supplier, @NonNull Predicate formatter) { + + static Field of( + @NonNull final String name, + @NonNull final Supplier supplier, + @NonNull final Predicate formatter) { + return new Field<>(name, supplier, formatter); + } + + /** Convenience method to extract the field from the account then apply the formatter to it. */ + boolean apply() { + return formatter.test(supplier.get()); + } + } + + /** A mapping for all `int`-valued fields that takes the field name to the field extractor. */ + private static final List>> intFieldsMapping = List.of( + Pair.of("#+B", HederaAccount::numberPositiveBalances), + Pair.of("#A", HederaAccount::numberAssociations), + Pair.of("#KV", HederaAccount::contractKvPairsNumber), + Pair.of("#TT", HederaAccount::numberTreasuryTitles), + Pair.of("#UAA", HederaAccount::usedAutoAssociations), + Pair.of("^AA", HederaAccount::maxAutoAssociations)); + + /** A mapping for all `boolean`-valued fields that takes the field name to the field extractor. */ + private static final List>> booleanFieldsMapping = List.of( + Pair.of("AR", h -> h.autoRenewAccountId() != null), + Pair.of("BR", a -> a.stakeAtStartOfLastRewardedPeriod() != -1L), + Pair.of("DL", HederaAccount::deleted), + Pair.of("DR", HederaAccount::declineReward), + Pair.of("ER", HederaAccount::expiredAndPendingRemoval), + Pair.of("HA", a -> a.alias() != null), + Pair.of("IM", HederaAccount::isImmutable), + Pair.of("PR", a -> (int) a.stakedId().value() < 0 && !a.declineReward()), + Pair.of("RSR", HederaAccount::receiverSigRequired), + Pair.of("SC", HederaAccount::smartContract), + Pair.of("TT", a -> a.numberTreasuryTitles() > 0)); + + /** A mapping for all `long`-valued fields that takes the field name to the field extractor. */ + private static final List>> longFieldsMapping = List.of( + Pair.of("#NFT", HederaAccount::numberOwnedNfts), + Pair.of("ARS", HederaAccount::autoRenewSeconds), + Pair.of("B", a -> (long) a.numberPositiveBalances()), + Pair.of("EX", HederaAccount::expirationSecond), + Pair.of("HNSN", a -> a.headNftId().serialNumber()), + Pair.of("HNTN", a -> a.headNftId().tokenId().tokenNum()), + Pair.of("HTI", a -> a.headTokenId().tokenNum()), + Pair.of("N", HederaAccount::ethereumNonce), + Pair.of("SID", a -> (long) a.stakedId().value()), + Pair.of("SNID", HederaAccount::stakedNodeAddressBookId), + Pair.of("SPS", coerceMinus1ToBeDefault(HederaAccount::stakePeriodStart)), + Pair.of("STM", HederaAccount::stakedToMe), + Pair.of("TS", HederaAccount::totalStake), + Pair.of("TSL", coerceMinus1ToBeDefault(HederaAccount::stakeAtStartOfLastRewardedPeriod))); + + /** For the compressed field output we want to have field name abbreviations (for compactness), but for the CSV + * output we can afford the full names. This maps between them. (Only the primitive-valued fields have the + * abbreviations.) 
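+ * e.g., {@code #NFT} expands to {@code numNftsOwned} and {@code ^AA} to {@code maxAutomaticAssociations}.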
+ */ + private static final Map fieldNameMap = toMap( + "#+B", "numPositiveBalances", + "#A", "numAssociations", + "#KV", "numContractKvPairs", + "#NFT", "numNftsOwned", + "#TT", "numTreasuryTitles", + "#UAA", "numUsedAutoAssociations", + "AR", "hasAutoRenewAccount", + "ARS", "autoRenewSecs", + "B", "balance", + "BR", "hasBeenRewardedSinceLastStakeMetaChange", + "DL", "deleted", + "DR", "declinedReward", + "ER", "expiredAndPendingRemoval", + "EX", "expiry", + "HA", "hasAlias", + "HNSN", "headNftSerialNum", + "HNTN", "headNftTokenNum", + "HTI", "headTokenId", + "IM", "immutable", + "N", "ethereumNonce", + "PR", "mayHavePendingReward", + "RSR", "receiverSigRequired", + "SC", "smartContract", + "SID", "stakedId", + "SNID", "stakedNodeAddressBookId", + "SPS", "stakePeriodStart", + "STM", "stakedToMe", + "TS", "totalStake", + "TSL", "totalStakeAtStartOfLastRewardedPeriod", + "TT", "tokenTreasury", + "^AA", "maxAutomaticAssociations"); + + /** `Map.of` only has 11 overloads - for up to 10 entries. After that there's a variadic `Map.ofEntries` which is + * klunky because it takes `Map.Entry`s. So this is the variadic form of `Map.of`. Not sure why the Java people + * didn't just put this in the `Map` class. + */ + @NonNull + private static Map toMap(String... es) { + if (0 != es.length % 2) { + throw new IllegalArgumentException( + "must have even number of args to `toMap`, %d given".formatted(es.length)); + } + final var r = new TreeMap(); + for (int i = 0; i < es.length; i += 2) { + r.put(es[i], es[i + 1]); + } + return r; + } + + /** Given a mapping from field names (or abbreviations) to a field extraction function (extract from an account) + * produce the formatted form of all the fields given in the mapping. Can do either of the `Format`s: CSV or + * compressed fields. + * @param sb Accumulating `StringBuffer` + * @param a Account to get field from + * @param mapping Mapping of field name (or abbreviation) to its extraction method + * @param isDefaultValue Predicate to decide if this field has its default value (and can be elided) + * @param formatField Method taking field name _and_ value to a string + */ + private static > void formatAccountFieldsForDifferentOutputFormats( + @NonNull final StringBuilder sb, + @NonNull final HederaAccount a, + @NonNull final String name, + @NonNull List>> mapping, + @NonNull T missingValue, + @NonNull Predicate isDefaultValue, + @NonNull Function, String> formatField) { + final var l = sb.length(); + + formatFieldSep(sb, name); + formatAccountFields(sb, a, mapping, missingValue, isDefaultValue, formatField, parenWrappingJoiner); + + if (sb.length() - l <= 0) { + sb.setLength(l); + } + } + + /** Given a mapping from field names (or abbreviations) to a field extraction function (extract from an account) + * produce the formatted form of all the fields given in the mapping. 
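+ * (For instance, with {@code longFieldsMapping} and the paren-wrapping joiner, an account whose only non-default long value is an {@code ethereumNonce} of 7 contributes {@code (N:7)}.)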
Takes some additional function arguments + * that "customize" the formatting of this field + * @param sb Accumulating `StringBuffer` + * @param a Account to get field from + * @param mapping Mapping of field name (or abbreviation) to its extraction method + * @param isDefaultValue Predicate to decide if this field has its default value (and can be elided) + * @param formatField Method taking field name _and_ value to a string + * @param joinFields Stream collector to join multiple field values + */ + private static > void formatAccountFields( + @NonNull final StringBuilder sb, + @NonNull final HederaAccount a, + @NonNull List>> mapping, + @NonNull T missingValue, + @NonNull Predicate isDefaultValue, + @NonNull Function, String> formatField, + @NonNull Collector joinFields) { + sb.append(mapping.stream() + .map(p -> Pair.of(p.getLeft(), applySwallowingExceptions(p.getRight(), a, missingValue))) + .filter(p -> p.getRight() != null && !isDefaultValue.test(p.getRight())) + .sorted(Comparator.comparing(Pair::getLeft)) + .map(formatField) + .collect(joinFields)); + } + + /** A simple formatter for field names in the compressed fields case: writes the field separator then `{name}:` */ + private static void formatFieldSep(@NonNull final StringBuilder sb, @NonNull final String name) { + sb.append(FIELD_SEPARATOR); + sb.append(name); + sb.append(NAME_TO_VALUE_SEPARATOR); + } + + /** A Field formatter that emits fields as "name:value". Used for non-boolean fields in compressed format. */ + @NonNull + private static String taggedFieldFormatter(@NonNull final Pair p) { + return p.getLeft() + NAME_TO_VALUE_SEPARATOR + p.getRight(); + } + + /** Unfortunately this is a hack to handle the two long-valued fields where `-1` is used as the "missing" marker. + * Probably all the primitive-valued fields should be changed to use `Field` descriptors, which would then be + * enhanced to have a per-field "is default value?" predicate. But not now.) + */ + @SuppressWarnings( + "java:S4276") // Functional interfaces should be as specialized as possible - except not in this case, for + // consistency + @NonNull + private static Function coerceMinus1ToBeDefault( + @NonNull final Function fn) { + return a -> { + final var v = fn.apply(a); + return v == -1 ? 0 : v; + }; + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/accounts/HederaAccount.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/accounts/HederaAccount.java new file mode 100644 index 000000000000..defa5de57302 --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/accounts/HederaAccount.java @@ -0,0 +1,245 @@ +/* + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.bbm.accounts; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.NftID; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.Account.StakedIdOneOfType; +import com.hedera.hapi.node.state.token.AccountApprovalForAllAllowance; +import com.hedera.hapi.node.state.token.AccountCryptoAllowance; +import com.hedera.hapi.node.state.token.AccountFungibleTokenAllowance; +import com.hedera.node.app.service.mono.pbj.PbjConverter; +import com.hedera.node.app.service.mono.state.submerkle.FcTokenAllowanceId; +import com.hedera.node.app.service.mono.state.virtual.entities.OnDiskAccount; +import com.hedera.node.app.service.mono.utils.EntityNum; +import com.hedera.node.app.service.mono.utils.EntityNumPair; +import com.hedera.node.app.state.merkle.disk.OnDiskValue; +import com.hedera.pbj.runtime.OneOf; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public record HederaAccount( + @Nullable AccountID accountId, + @NonNull Bytes alias, + @Nullable Key key, + long expirationSecond, + long tinybarBalance, + String memo, + boolean deleted, + long stakedToMe, + long stakePeriodStart, + OneOf stakedId, + boolean declineReward, + boolean receiverSigRequired, + @Nullable TokenID headTokenId, + @Nullable NftID headNftId, + long headNftSerialNumber, + long numberOwnedNfts, + int maxAutoAssociations, + int usedAutoAssociations, + int numberAssociations, + boolean smartContract, + int numberPositiveBalances, + long ethereumNonce, + long stakeAtStartOfLastRewardedPeriod, + @Nullable AccountID autoRenewAccountId, + long autoRenewSeconds, + int contractKvPairsNumber, + @Nullable List cryptoAllowances, + @Nullable List approveForAllNftAllowances, + @Nullable List tokenAllowances, + int numberTreasuryTitles, + boolean expiredAndPendingRemoval, + @NonNull Bytes firstContractStorageKey, + boolean immutable, + long stakedNodeAddressBookId) { + + private static final AccountID MISSING_ACCOUNT_ID = AccountID.DEFAULT; + public static final long ONE_HBAR_IN_TINYBARS = 100_000_000L; + public static final HederaAccount DUMMY_ACCOUNT = new HederaAccount( + null, null, null, 0, 0, "", false, 0, 0, null, false, false, null, null, 0, 0, 0, 0, 0, false, 0, 0, 0, + null, 0, 0, null, null, null, 0, false, null, false, 0); + + public static HederaAccount fromMono(OnDiskAccount account) { + return new HederaAccount( + AccountID.newBuilder().accountNum(account.getAccountNumber()).build(), + Bytes.wrap(account.getAlias().toByteArray()), + PbjConverter.asPbjKey(account.getKey()), + account.getExpiry(), + account.getBalance() * ONE_HBAR_IN_TINYBARS, + account.getMemo(), + account.isDeleted(), + account.getStakedToMe(), + account.getStakePeriodStart(), + new OneOf<>(StakedIdOneOfType.STAKED_ACCOUNT_ID, account.getStakedId()), + account.isDeclineReward(), + account.isReceiverSigRequired(), + new TokenID(0, 0, account.getHeadTokenId()), + new NftID(new TokenID(0, 0, account.getHeadNftId()), account.getHeadNftSerialNum()), + account.getHeadNftSerialNum(), + account.getNftsOwned(), + account.getMaxAutoAssociations(), + account.getUsedAutoAssociations(), + account.getNumAssociations(), + account.isSmartContract(), + account.getNumPositiveBalances(), + 
account.getEthereumNonce(), + account.getStakeAtStartOfLastRewardedPeriod(), + account.getAutoRenewAccount() != null + ? account.getAutoRenewAccount().toPbjAccountId() + : null, + account.getAutoRenewSecs(), + account.getNumContractKvPairs(), + toCryptoAllowance(account.getCryptoAllowances()), + toApproveForAllNftAllowances(account.getApproveForAllNfts()), + toAccountFungibleTokenAllowance(account.getFungibleTokenAllowances()), + account.getNumTreasuryTitles(), + account.isExpiredAndPendingRemoval(), + toBytes(account.getFirstContractStorageKey().getKey()), + account.isImmutable(), + account.getStakedNodeAddressBookId()); + } + + @NonNull + private static Bytes toBytes(@NonNull final int[] packed) { + final var buf = ByteBuffer.allocate(32); + buf.asIntBuffer().put(packed); + return Bytes.wrap(buf.array()); + } + + private static List toCryptoAllowance(Map cryptoAllowanceMap) { + return cryptoAllowanceMap.entrySet().stream() + .map(c -> new AccountCryptoAllowance(c.getKey().toEntityId().toPbjAccountId(), c.getValue())) + .toList(); + } + + private static List toApproveForAllNftAllowances( + Set allowanceIds) { + return allowanceIds.stream() + .map(a -> new AccountApprovalForAllAllowance( + new TokenID(0, 0, a.getTokenNum().longValue()), + a.getSpenderNum().toEntityId().toPbjAccountId())) + .toList(); + } + + private static List toAccountFungibleTokenAllowance( + Map tokenAllowanceMap) { + return tokenAllowanceMap.entrySet().stream() + .map(t -> new AccountFungibleTokenAllowance( + new TokenID(0, 0, t.getKey().getTokenNum().longValue()), + t.getKey().getSpenderNum().toEntityId().toPbjAccountId(), + t.getValue())) + .toList(); + } + + public static HederaAccount fromMod(OnDiskValue account) { + return new HederaAccount( + account.getValue().accountId(), + account.getValue().alias(), + account.getValue().key(), + account.getValue().expirationSecond(), + account.getValue().tinybarBalance(), + account.getValue().memo(), + account.getValue().deleted(), + account.getValue().stakedToMe(), + account.getValue().stakePeriodStart(), + account.getValue().stakedId(), + account.getValue().declineReward(), + account.getValue().receiverSigRequired(), + account.getValue().headTokenId(), + account.getValue().headNftId(), + account.getValue().headNftSerialNumber(), + account.getValue().numberOwnedNfts(), + account.getValue().maxAutoAssociations(), + account.getValue().usedAutoAssociations(), + account.getValue().numberAssociations(), + account.getValue().smartContract(), + account.getValue().numberPositiveBalances(), + account.getValue().ethereumNonce(), + account.getValue().stakeAtStartOfLastRewardedPeriod(), + account.getValue().autoRenewAccountId(), + account.getValue().autoRenewSeconds(), + account.getValue().contractKvPairsNumber(), + account.getValue().cryptoAllowances(), + account.getValue().approveForAllNftAllowances(), + account.getValue().tokenAllowances(), + account.getValue().numberTreasuryTitles(), + account.getValue().expiredAndPendingRemoval(), + account.getValue().firstContractStorageKey(), + isAccountImmutable(account.getValue()), + account.getValue().stakedNodeId() != null ? 
account.getValue().stakedNodeId() : 0); + } + + private static final AccountID immutableAccount1 = + AccountID.newBuilder().accountNum(800).build(); + private static final AccountID immutableAccount2 = + AccountID.newBuilder().accountNum(801).build(); + private static final List immutableAccounts = List.of(immutableAccount1, immutableAccount2); + + private static boolean isAccountImmutable(Account account) { + return immutableAccounts.contains(account.accountId()); + } + + public boolean isImmutable() { + return immutable; + } + + public EntityNumPair getHeadNftKey() { + if (headNftId() == null || headNftId().tokenId() == null) { + return null; + } + + return EntityNumPair.fromLongs(headNftId().tokenId().tokenNum(), headNftSerialNumber); + } + + public EntityNumPair getLatestAssociation() { + if (accountId == null || accountId.accountNum() == null || headTokenId() == null) { + return null; + } + return EntityNumPair.fromLongs(accountId.accountNum(), headTokenId().tokenNum()); + } + + public long totalStake() { + return tinybarBalance() / ONE_HBAR_IN_TINYBARS + stakedToMe(); + } + + public AccountID getProxy() { + return MISSING_ACCOUNT_ID; + } + + public int[] getFirstUint256Key() { + return toInts(firstContractStorageKey); + } + + private static int[] toInts(Bytes bytes) { + final var bytesArray = bytes.toByteArray(); + ByteBuffer buf = ByteBuffer.wrap(bytesArray); + int[] ints = new int[bytesArray.length / 4]; + for (int i = 0; i < ints.length; i++) { + ints[i] = buf.getInt(); + } + return ints; + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/utils/ThingsToStrings.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/utils/ThingsToStrings.java index faf18b3b0c4e..a8dfb55a9089 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/utils/ThingsToStrings.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/bbm/utils/ThingsToStrings.java @@ -17,6 +17,11 @@ package com.hedera.node.app.bbm.utils; import com.google.protobuf.ByteString; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.token.AccountApprovalForAllAllowance; +import com.hedera.hapi.node.state.token.AccountCryptoAllowance; +import com.hedera.hapi.node.state.token.AccountFungibleTokenAllowance; import com.hedera.node.app.service.mono.legacy.core.jproto.JKey; import com.hedera.node.app.service.mono.state.submerkle.EntityId; import com.hedera.node.app.service.mono.state.submerkle.FcCustomFee; @@ -28,6 +33,7 @@ import com.hedera.node.app.service.mono.state.virtual.ContractKey; import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.mono.utils.EntityNumPair; +import com.hedera.pbj.runtime.io.buffer.Bytes; import com.hederahashgraph.api.proto.java.Key; import com.swirlds.common.crypto.CryptographyHolder; import edu.umd.cs.findbugs.annotations.NonNull; @@ -41,6 +47,7 @@ import java.security.InvalidKeyException; import java.util.Arrays; import java.util.HexFormat; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; @@ -51,6 +58,10 @@ public class ThingsToStrings { + private ThingsToStrings() { + // Utility class + } + /** Quotes a string to be a valid field in a CSV (comma-separated file), as defined in RFC-4180 * (https://datatracker.ietf.org/doc/html/rfc4180) _except_ that we allow the field separator to * be something other than "," (e.g., ";", if we have a lot of fields that contain embedded ","). 
@@ -124,6 +135,26 @@ public static String toStringOfEntityId(@NonNull EntityId entityId) { return entityId.toAbbrevString(); } + public static boolean toStringOfAccountId(@NonNull final StringBuilder sb, @Nullable AccountID accountID) { + if (accountID == null) { + return false; + } + + sb.append(String.format( + "%d.%d.%s", + accountID.shardNum(), accountID.realmNum(), accountID.account().value())); + return true; + } + + public static boolean toStringOfTokenId(StringBuilder sb, TokenID tokenID) { + if (tokenID == null) { + return false; + } + + sb.append(String.format("%d.%d.%s", tokenID.shardNum(), tokenID.realmNum(), tokenID.tokenNum())); + return true; + } + public static boolean toStringOfEntityId(@NonNull final StringBuilder sb, @Nullable final EntityId entityId) { if (entityId == null || entityId.equals(EntityId.MISSING_ENTITY_ID)) return false; @@ -235,6 +266,19 @@ public static boolean toStringOfContractKey(@NonNull final StringBuilder sb, @Nu return true; } + public static boolean toStringOfKey( + @NonNull final StringBuilder sb, @Nullable final com.hedera.hapi.node.base.Key key) { + if (key == null) { + return false; + } + try { + return toStringOfJKey(sb, JKey.convertKey(key, 15 /*JKey.MAX_KEY_DEPTH*/)); + } catch (final InvalidKeyException ignored) { + sb.append(""); + return true; + } + } + public static boolean toStringOfFcTokenAllowanceId( @NonNull final StringBuilder sb, @Nullable final FcTokenAllowanceId id) { if (id == null) return false; @@ -263,6 +307,96 @@ public static boolean toStringOfFcTokenAllowanceIdSet( return true; } + public static boolean toStringOfApprovalForAllAllowances( + @NonNull final StringBuilder sb, @Nullable List approvals) { + if (approvals == null || approvals.isEmpty()) { + return false; + } + + final var orderedApprovals = approvals.stream().sorted().toList(); + sb.append("("); + for (final var approval : orderedApprovals) { + toStringOfApprovalForAllAllowance(sb, approval); + sb.append(","); + } + sb.setLength(sb.length() - 1); + sb.append(")"); + return true; + } + + private static void toStringOfApprovalForAllAllowance(StringBuilder sb, AccountApprovalForAllAllowance approval) { + if (approval == null) { + return; + } + + sb.append("("); + toStringOfTokenId(sb, approval.tokenId()); + sb.append(","); + toStringOfAccountId(sb, approval.spenderId()); + sb.append(")"); + } + + public static boolean toStringOfAccountCryptoAllowances( + @NonNull final StringBuilder sb, @Nullable List allowances) { + if (allowances == null || allowances.isEmpty()) { + return false; + } + + final var orderedAllowances = allowances.stream().sorted().toList(); + sb.append("("); + for (final var allowance : orderedAllowances) { + toStringOfAccountCryptoAllowance(sb, allowance); + sb.append(","); + } + sb.setLength(sb.length() - 1); + sb.append(")"); + return true; + } + + private static void toStringOfAccountCryptoAllowance(StringBuilder sb, AccountCryptoAllowance allowance) { + if (allowance == null) { + return; + } + + sb.append("("); + toStringOfAccountId(sb, allowance.spenderId()); + sb.append(","); + sb.append(allowance.amount()); + sb.append(")"); + } + + public static boolean toStringOfAccountFungibleTokenAllowances( + @NonNull final StringBuilder sb, @Nullable List allowances) { + if (allowances == null || allowances.isEmpty()) { + return false; + } + + final var orderedAllowances = allowances.stream().sorted().toList(); + sb.append("("); + for (final var allowance : orderedAllowances) { + toStringOfAccountFungibleTokenAllowance(sb, allowance); + 
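+ // each rendered allowance is followed by a comma; the trailing comma is trimmed below before the closing paren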
sb.append(","); + } + sb.setLength(sb.length() - 1); + sb.append(")"); + return true; + } + + private static void toStringOfAccountFungibleTokenAllowance( + StringBuilder sb, AccountFungibleTokenAllowance allowance) { + if (allowance == null) { + return; + } + + sb.append("("); + toStringOfTokenId(sb, allowance.tokenId()); + sb.append(","); + toStringOfAccountId(sb, allowance.spenderId()); + sb.append(","); + sb.append(allowance.amount()); + sb.append(")"); + } + public static boolean toStringOfMapEnLong( @NonNull final StringBuilder sb, @Nullable final Map map) { if (map == null || map.isEmpty()) return false; @@ -424,5 +558,11 @@ public static Function getMaybeStringifyByteString(@NonNull fina return bs -> toStringPossibleHumanReadableByteArray(fieldSeparator, bs); } - private ThingsToStrings() {} + public static boolean getMaybeStringifyByteString(@NonNull final StringBuilder sb, @Nullable final Bytes bytes) { + if (bytes == null) { + return false; + } + sb.append(toStringPossibleHumanReadableByteArray(";", bytes.toByteArray())); + return true; + } } diff --git a/hedera-node/test-clients/src/itest/java/ConcurrentSuites.java b/hedera-node/test-clients/src/itest/java/ConcurrentSuites.java index 9b54f9df5601..039ab993a58f 100644 --- a/hedera-node/test-clients/src/itest/java/ConcurrentSuites.java +++ b/hedera-node/test-clients/src/itest/java/ConcurrentSuites.java @@ -39,6 +39,7 @@ import com.hedera.services.bdd.suites.contract.openzeppelin.ERC721ContractInteractions; import com.hedera.services.bdd.suites.contract.precompile.ApproveAllowanceSuite; import com.hedera.services.bdd.suites.contract.precompile.AssociatePrecompileSuite; +import com.hedera.services.bdd.suites.contract.precompile.AssociatePrecompileV2SecurityModelSuite; import com.hedera.services.bdd.suites.contract.precompile.ContractBurnHTSSuite; import com.hedera.services.bdd.suites.contract.precompile.ContractHTSSuite; import com.hedera.services.bdd.suites.contract.precompile.ContractKeysHTSSuite; @@ -169,6 +170,7 @@ static Supplier[] all() { TokenInfoHTSSuite::new, TokenUpdatePrecompileSuite::new, WipeTokenAccountPrecompileSuite::new, + AssociatePrecompileV2SecurityModelSuite::new, // contract.records LogsSuite::new, RecordsSuite::new, diff --git a/hedera-node/test-clients/src/itest/java/SequentialSuites.java b/hedera-node/test-clients/src/itest/java/SequentialSuites.java index 9041d8eefb2e..d6f40d97e62c 100644 --- a/hedera-node/test-clients/src/itest/java/SequentialSuites.java +++ b/hedera-node/test-clients/src/itest/java/SequentialSuites.java @@ -54,7 +54,7 @@ static Supplier[] sequentialSuites() { Create2OperationSuite::new, CannotDeleteSystemEntitiesSuite::new, Evm38ValidationSuite::new, - StakingSuite::new + StakingSuite::new, }; } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileV2SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileV2SecurityModelSuite.java new file mode 100644 index 000000000000..a8f181b7961f --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileV2SecurityModelSuite.java @@ -0,0 +1,674 @@ +/* + * Copyright (C) 2021-2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.keys.KeyShape.CONTRACT; +import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; +import static com.hedera.services.bdd.spec.keys.KeyShape.ED25519; +import static com.hedera.services.bdd.spec.keys.KeyShape.sigs; +import static com.hedera.services.bdd.spec.keys.SigControl.ON; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel.relationshipWith; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.emptyChildRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.BUSY; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.TokenFreezeStatus.FreezeNotApplicable; +import static com.hederahashgraph.api.proto.java.TokenFreezeStatus.Frozen; +import static com.hederahashgraph.api.proto.java.TokenFreezeStatus.Unfrozen; +import static com.hederahashgraph.api.proto.java.TokenKycStatus.KycNotApplicable; +import static com.hederahashgraph.api.proto.java.TokenKycStatus.Revoked; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; +import static 
com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; + +import com.esaulpaugh.headlong.abi.Address; +import com.hedera.services.bdd.junit.HapiTest; +import com.hedera.services.bdd.junit.HapiTestSuite; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.keys.KeyShape; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.TokenType; +import java.util.List; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +@HapiTestSuite +public class AssociatePrecompileV2SecurityModelSuite extends HapiSuite { + + private static final Logger log = LogManager.getLogger(AssociatePrecompileV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 4_000_000L; + private static final long TOTAL_SUPPLY = 1_000; + private static final String SIGNER = "anybody"; + private static final String TOKEN_TREASURY = "treasury"; + public static final String ASSOCIATE_CONTRACT = "AssociateDissociate"; + public static final String NESTED_ASSOCIATE_CONTRACT = "NestedAssociateDissociate"; + private static final KeyShape THRESHOLD_KEY_SHAPE = KeyShape.threshOf(1, ED25519, CONTRACT); + private static final KeyShape DELEGATE_CONTRACT_KEY_SHAPE = KeyShape.threshOf(1, ED25519, DELEGATE_CONTRACT); + private static final String NON_FUNGIBLE_TOKEN = "nonFungibleToken"; + private static final String FUNGIBLE_TOKEN = "fungibleToken"; + private static final String ACCOUNT = "anybody"; + private static final String FROZEN_TOKEN = "Frozen token"; + private static final String UNFROZEN_TOKEN = "Unfrozen token"; + private static final String KYC_TOKEN = "KYC token"; + private static final String FREEZE_KEY = "Freeze key"; + private static final String KYC_KEY = "KYC key"; + private static final String ADMIN_KEY = "Admin key"; + private static final String CONTRACT_KEY = "ContractKey"; + private static final String MINT_TOKEN_CONTRACT = "MixedMintToken"; + + public static void main(String... 
args) { + new AssociatePrecompileV2SecurityModelSuite().runSuiteAsync(); + } + + @Override + public boolean canRunConcurrent() { + return true; + } + + @Override + public List getSpecsInSuite() { + return allOf(positiveSpecs(), negativeSpecs()); + } + + List negativeSpecs() { + return List.of( + v2Security006TokenAssociateNegativeTests(), V2Security041TokenAssociateFromStaticcallAndCallcode()); + } + + List positiveSpecs() { + return List.of( + v2Security031AssociateSingleTokenWithDelegateContractKey(), + v2Security010NestedAssociateNftAndNonFungibleTokens(), + V2Security036TokenAssociateFromDelegateCallWithDelegateContractId()); + } + + @HapiTest + final HapiSpec v2Security031AssociateSingleTokenWithDelegateContractKey() { + + return defaultHapiSpec("v2Security031AssociateSingleTokenWithDelegateContractKey") + .given( + newKeyNamed(FREEZE_KEY), + newKeyNamed(KYC_KEY), + cryptoCreate(TOKEN_TREASURY).balance(ONE_HUNDRED_HBARS), + cryptoCreate(SIGNER).balance(ONE_MILLION_HBARS), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .supplyKey(TOKEN_TREASURY) + .adminKey(TOKEN_TREASURY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .initialSupply(0) + .treasury(TOKEN_TREASURY) + .adminKey(TOKEN_TREASURY) + .supplyKey(TOKEN_TREASURY), + tokenCreate(FROZEN_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .initialSupply(TOTAL_SUPPLY) + .freezeKey(FREEZE_KEY) + .freezeDefault(true) + .adminKey(TOKEN_TREASURY) + .supplyKey(TOKEN_TREASURY), + tokenCreate(UNFROZEN_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .freezeKey(FREEZE_KEY) + .freezeDefault(false) + .adminKey(TOKEN_TREASURY) + .supplyKey(TOKEN_TREASURY), + tokenCreate(KYC_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .kycKey(KYC_KEY) + .adminKey(TOKEN_TREASURY) + .supplyKey(TOKEN_TREASURY), + uploadInitCode(ASSOCIATE_CONTRACT, MINT_TOKEN_CONTRACT), + contractCreate(MINT_TOKEN_CONTRACT), + contractCreate(ASSOCIATE_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + newKeyNamed(CONTRACT_KEY).shape(THRESHOLD_KEY_SHAPE.signedWith(sigs(ON, ASSOCIATE_CONTRACT))), + cryptoUpdate(SIGNER).key(CONTRACT_KEY), + tokenUpdate(FUNGIBLE_TOKEN).supplyKey(CONTRACT_KEY), + // Test Case 1: Account paying and signing a fungible TOKEN ASSOCIATE TRANSACTION, + // when signer has a threshold key + // associating ACCOUNT to the token + // SIGNER → call → CONTRACT A → call → HTS + contractCall( + ASSOCIATE_CONTRACT, + "tokenAssociate", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN)))) + .signedBy(SIGNER) + .payingWith(SIGNER) + .hasRetryPrecheckFrom(BUSY) + .via("fungibleTokenAssociate") + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS), + // Test Case 2: Account paying and signing a non fungible TOKEN ASSOCIATE TRANSACTION, + // when signer has a threshold key + // associating ACCOUNT to the token + // SIGNER → call → CONTRACT A → call → HTS + tokenUpdate(NON_FUNGIBLE_TOKEN).supplyKey(CONTRACT_KEY), + contractCall( + ASSOCIATE_CONTRACT, + "tokenAssociate", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN)))) + .signedBy(SIGNER) + .payingWith(SIGNER) + .hasRetryPrecheckFrom(BUSY) + 
.via("nonFungibleTokenAssociate") + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS), + // Test Case 3: Account paying and signing a multiple TOKENS ASSOCIATE TRANSACTION, + // when signer has a threshold key + // SIGNER → call → CONTRACT A → call → HTS + tokenUpdate(FROZEN_TOKEN).supplyKey(CONTRACT_KEY), + tokenUpdate(UNFROZEN_TOKEN).supplyKey(CONTRACT_KEY), + tokenUpdate(KYC_TOKEN).supplyKey(CONTRACT_KEY), + contractCall( + ASSOCIATE_CONTRACT, + "tokensAssociate", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + new Address[] { + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FROZEN_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(UNFROZEN_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(KYC_TOKEN))), + }) + .signedBy(SIGNER) + .payingWith(SIGNER) + .hasRetryPrecheckFrom(BUSY) + .via("multipleTokensAssociate") + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS)))) + .then(getAccountInfo(ACCOUNT) + .hasToken(relationshipWith(FUNGIBLE_TOKEN) + .kyc(KycNotApplicable) + .freeze(FreezeNotApplicable)) + .hasToken(relationshipWith(NON_FUNGIBLE_TOKEN) + .kyc(KycNotApplicable) + .freeze(FreezeNotApplicable)) + .hasToken(relationshipWith(FROZEN_TOKEN) + .kyc(KycNotApplicable) + .freeze(Frozen)) + .hasToken(relationshipWith(UNFROZEN_TOKEN) + .kyc(KycNotApplicable) + .freeze(Unfrozen)) + .hasToken(relationshipWith(KYC_TOKEN).kyc(Revoked).freeze(FreezeNotApplicable))); + } + + @HapiTest + final HapiSpec v2Security006TokenAssociateNegativeTests() { + return defaultHapiSpec("v2Security006TokenAssociateNegativeTests") + .given( + newKeyNamed(FREEZE_KEY), + newKeyNamed(KYC_KEY), + newKeyNamed(ADMIN_KEY), + cryptoCreate(TOKEN_TREASURY).balance(ONE_HUNDRED_HBARS), + cryptoCreate(SIGNER).balance(ONE_HUNDRED_HBARS), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .supplyKey(TOKEN_TREASURY) + .adminKey(TOKEN_TREASURY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .initialSupply(0) + .treasury(TOKEN_TREASURY) + .adminKey(ADMIN_KEY) + .supplyKey(TOKEN_TREASURY), + tokenCreate(FROZEN_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .initialSupply(TOTAL_SUPPLY) + .freezeKey(FREEZE_KEY) + .freezeDefault(true) + .adminKey(TOKEN_TREASURY) + .supplyKey(TOKEN_TREASURY), + tokenCreate(UNFROZEN_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .freezeKey(FREEZE_KEY) + .freezeDefault(false) + .adminKey(TOKEN_TREASURY) + .supplyKey(TOKEN_TREASURY), + tokenCreate(KYC_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .kycKey(KYC_KEY) + .adminKey(TOKEN_TREASURY) + .supplyKey(TOKEN_TREASURY), + uploadInitCode(ASSOCIATE_CONTRACT, NESTED_ASSOCIATE_CONTRACT, MINT_TOKEN_CONTRACT), + contractCreate(ASSOCIATE_CONTRACT), + contractCreate(MINT_TOKEN_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + NESTED_ASSOCIATE_CONTRACT, + asHeadlongAddress(getNestedContractAddress(ASSOCIATE_CONTRACT, spec))), + // Test Case 1: SIGNER account paying and signing a fungible TOKEN ASSOCIATE TRANSACTION, + // associating token to ACCOUNT + // SIGNER → call → CONTRACT A → call → HTS + contractCall( + ASSOCIATE_CONTRACT, + "tokenAssociate", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + 
asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN)))) + .payingWith(ACCOUNT) + .signedBy(ACCOUNT) + .hasRetryPrecheckFrom(BUSY) + .via("fungibleTokenAssociate") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + getTxnRecord("fungibleTokenAssociate") + .andAllChildRecords() + .logged(), + // Test Case 2: SIGNER account paying and signing a non fungible TOKEN ASSOCIATE TRANSACTION, + // associating to ACCOUNT + // SIGNER → call → CONTRACT A → call → HTS + contractCall( + ASSOCIATE_CONTRACT, + "tokenAssociate", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN)))) + .payingWith(ACCOUNT) + .signedBy(ACCOUNT) + .hasRetryPrecheckFrom(BUSY) + .via("nonFungibleTokenAssociate") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + getTxnRecord("nonFungibleTokenAssociate") + .andAllChildRecords() + .logged(), + // Test Case 3: SIGNER account paying and signing multiple TOKENS ASSOCIATE TRANSACTION, + // associating to ЕОА ACCOUNT + // SIGNER → call → CONTRACT A → call → HTS + contractCall( + ASSOCIATE_CONTRACT, + "tokensAssociate", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + new Address[] { + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FROZEN_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(UNFROZEN_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(KYC_TOKEN))) + }) + .signedBy(ACCOUNT) + .payingWith(ACCOUNT) + .hasRetryPrecheckFrom(BUSY) + .via("multipleTokensAssociate") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + getTxnRecord("multipleTokensAssociate") + .andAllChildRecords() + .logged(), + // Test Case 4: SIGNER account paying and signing nested TOKEN ASSOCIATE TRANSACTION, + // associating to ЕОА ACCOUNT + // SIGNER → call → CONTRACT A → call → HTS + contractCall( + NESTED_ASSOCIATE_CONTRACT, + "associateInternalContractCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN)))) + .signedBy(ACCOUNT) + .payingWith(ACCOUNT) + .hasRetryPrecheckFrom(BUSY) + .via("nestedAssociateFungibleTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + getTxnRecord("nestedAssociateFungibleTxn") + .andAllChildRecords() + .logged(), + // Test Case 5: SIGNER account paying and signing a fungible TOKEN ASSOCIATE TRANSACTION, + // associating to CONTRACT + // when signer has a threshold key + // SIGNER → call → CONTRACT A → call → HTS + newKeyNamed(CONTRACT_KEY).shape(THRESHOLD_KEY_SHAPE.signedWith(sigs(ON, MINT_TOKEN_CONTRACT))), + cryptoUpdate(SIGNER).key(CONTRACT_KEY), + tokenUpdate(FUNGIBLE_TOKEN).supplyKey(CONTRACT_KEY), + contractCall( + ASSOCIATE_CONTRACT, + "tokenAssociate", + asHeadlongAddress(getNestedContractAddress(MINT_TOKEN_CONTRACT, spec)), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN)))) + .signedBy(SIGNER) + .payingWith(SIGNER) + .hasRetryPrecheckFrom(BUSY) + .via("associateTokenToContractFails") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + getTxnRecord("associateTokenToContractFails") + .andAllChildRecords() + .logged()))) + .then( + childRecordsCheck( + "fungibleTokenAssociate", + CONTRACT_REVERT_EXECUTED, + recordWith() + 
.status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + childRecordsCheck( + "nonFungibleTokenAssociate", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + childRecordsCheck( + "multipleTokensAssociate", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + childRecordsCheck( + "nestedAssociateFungibleTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + childRecordsCheck( + "associateTokenToContractFails", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE))))); + } + + @HapiTest + final HapiSpec v2Security010NestedAssociateNftAndNonFungibleTokens() { + + return defaultHapiSpec("v2Security010NestedAssociateNftAndNonFungibleTokens") + .given( + cryptoCreate(ACCOUNT).balance(ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .supplyKey(TOKEN_TREASURY) + .adminKey(TOKEN_TREASURY) + .treasury(TOKEN_TREASURY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(NON_FUNGIBLE_UNIQUE) + .supplyKey(TOKEN_TREASURY) + .initialSupply(0) + .adminKey(TOKEN_TREASURY) + .treasury(TOKEN_TREASURY), + uploadInitCode(ASSOCIATE_CONTRACT, NESTED_ASSOCIATE_CONTRACT), + contractCreate(ASSOCIATE_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + NESTED_ASSOCIATE_CONTRACT, + asHeadlongAddress(getNestedContractAddress(ASSOCIATE_CONTRACT, spec))), + // Test Case 1: Account paying and signing a nested fungible TOKEN ASSOCIATE TRANSACTION, + // when we associate the token to the signer + // SIGNER → call → CONTRACT A → call → CONTRACT B → call → PRECOMPILE(HTS) + newKeyNamed(CONTRACT_KEY).shape(THRESHOLD_KEY_SHAPE.signedWith(sigs(ON, ASSOCIATE_CONTRACT))), + cryptoUpdate(ACCOUNT).key(CONTRACT_KEY), + contractCall( + NESTED_ASSOCIATE_CONTRACT, + "associateInternalContractCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN)))) + .signedBy(ACCOUNT) + .payingWith(ACCOUNT) + .hasRetryPrecheckFrom(BUSY) + .via("nestedAssociateFungibleTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS), + getTxnRecord("nestedAssociateFungibleTxn") + .andAllChildRecords() + .logged(), + // Test Case 2: Account paying and signing a nested non fungible TOKEN ASSOCIATE TRANSACTION, + // when we associate the token to the signer + // SIGNER → call → CONTRACT A → call → CONTRACT B → call → PRECOMPILE(HTS) + contractCall( + NESTED_ASSOCIATE_CONTRACT, + "associateInternalContractCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + 
asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN)))) + .signedBy(ACCOUNT) + .payingWith(ACCOUNT) + .hasRetryPrecheckFrom(BUSY) + .via("nestedAssociateNonFungibleTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS)))) + .then( + getAccountInfo(ACCOUNT) + .hasToken(relationshipWith(FUNGIBLE_TOKEN) + .kyc(KycNotApplicable) + .freeze(FreezeNotApplicable)) + .hasToken(relationshipWith(NON_FUNGIBLE_TOKEN) + .kyc(KycNotApplicable) + .freeze(FreezeNotApplicable)), + childRecordsCheck( + "nestedAssociateFungibleTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + childRecordsCheck( + "nestedAssociateNonFungibleTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))))); + } + + @HapiTest + final HapiSpec V2Security036TokenAssociateFromDelegateCallWithDelegateContractId() { + + return defaultHapiSpec("V2Security036TokenAssociateFromDelegateCallWithDelegateContractId") + .given( + cryptoCreate(ACCOUNT).balance(ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .supplyKey(TOKEN_TREASURY) + .adminKey(TOKEN_TREASURY) + .treasury(TOKEN_TREASURY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(NON_FUNGIBLE_UNIQUE) + .supplyKey(TOKEN_TREASURY) + .initialSupply(0) + .adminKey(TOKEN_TREASURY) + .treasury(TOKEN_TREASURY), + uploadInitCode(ASSOCIATE_CONTRACT, NESTED_ASSOCIATE_CONTRACT), + contractCreate(ASSOCIATE_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + NESTED_ASSOCIATE_CONTRACT, + asHeadlongAddress(getNestedContractAddress(ASSOCIATE_CONTRACT, spec))), + // SIGNER → call → CONTRACT A → delegatecall → CONTRACT B → call → PRECOMPILE(HTS) + newKeyNamed(CONTRACT_KEY) + .shape(DELEGATE_CONTRACT_KEY_SHAPE.signedWith(sigs(ON, NESTED_ASSOCIATE_CONTRACT))), + cryptoUpdate(ACCOUNT).key(CONTRACT_KEY), + contractCall( + NESTED_ASSOCIATE_CONTRACT, + "associateDelegateCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN)))) + .signedBy(ACCOUNT) + .payingWith(ACCOUNT) + .hasRetryPrecheckFrom(BUSY) + .via("nestedAssociateFungibleTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS), + getTxnRecord("nestedAssociateFungibleTxn") + .andAllChildRecords() + .logged(), + // non fungible token + contractCall( + NESTED_ASSOCIATE_CONTRACT, + "associateDelegateCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN)))) + .signedBy(ACCOUNT) + .payingWith(ACCOUNT) + .hasRetryPrecheckFrom(BUSY) + .via("nestedAssociateNonFungibleTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS), + getTxnRecord("nestedAssociateNonFungibleTxn") + .andAllChildRecords() + .logged()))) + .then( + getAccountInfo(ACCOUNT) + .hasToken(relationshipWith(FUNGIBLE_TOKEN) + .kyc(KycNotApplicable) + .freeze(FreezeNotApplicable)) + .hasToken(relationshipWith(NON_FUNGIBLE_TOKEN) + .kyc(KycNotApplicable) + .freeze(FreezeNotApplicable)), + childRecordsCheck( + "nestedAssociateFungibleTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + childRecordsCheck( + "nestedAssociateNonFungibleTxn", + SUCCESS, +
recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))))); + } + + @HapiTest + final HapiSpec V2Security041TokenAssociateFromStaticcallAndCallcode() { + + return defaultHapiSpec("V2Security041TokenAssociateFromStaticcallAndCallcode") + .given( + cryptoCreate(ACCOUNT).balance(ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .supplyKey(TOKEN_TREASURY) + .adminKey(TOKEN_TREASURY) + .treasury(TOKEN_TREASURY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(NON_FUNGIBLE_UNIQUE) + .supplyKey(TOKEN_TREASURY) + .initialSupply(0) + .adminKey(TOKEN_TREASURY) + .treasury(TOKEN_TREASURY), + uploadInitCode(ASSOCIATE_CONTRACT, NESTED_ASSOCIATE_CONTRACT), + contractCreate(ASSOCIATE_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + NESTED_ASSOCIATE_CONTRACT, + asHeadlongAddress(getNestedContractAddress(ASSOCIATE_CONTRACT, spec))), + // SIGNER → call → CONTRACT A → staticcall → CONTRACT B → call → PRECOMPILE(HTS) + newKeyNamed(CONTRACT_KEY) + .shape(THRESHOLD_KEY_SHAPE.signedWith(sigs(ON, NESTED_ASSOCIATE_CONTRACT))), + cryptoUpdate(ACCOUNT).key(CONTRACT_KEY), + contractCall( + NESTED_ASSOCIATE_CONTRACT, + "associateStaticCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN)))) + .signedBy(ACCOUNT) + .payingWith(ACCOUNT) + .hasRetryPrecheckFrom(BUSY) + .via("associateStaticcallFungibleTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + getTxnRecord("associateStaticcallFungibleTxn") + .andAllChildRecords() + .logged()))) + .then( + emptyChildRecordsCheck("associateStaticcallFungibleTxn", CONTRACT_REVERT_EXECUTED), + getAccountInfo(ACCOUNT).hasNoTokenRelationship(FUNGIBLE_TOKEN)); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSV2SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSV2SecurityModelSuite.java index 07649df9d02a..da90d9a91d50 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSV2SecurityModelSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSV2SecurityModelSuite.java @@ -103,7 +103,7 @@ public class ContractMintHTSV2SecurityModelSuite extends HapiSuite { private static final String MINT_TOKEN_VIA_DELEGATE_CALL = "MixedMintToken"; public static void main(final String... 
args) { - new ContractMintHTSV2SecurityModelSuite().runSuiteSync(); + new ContractMintHTSV2SecurityModelSuite().runSuiteAsync(); } public List getSpecsInSuite() { diff --git a/hedera-node/test-clients/src/main/resource/contract/contracts/AssociateDissociate/AssociateDissociate.bin b/hedera-node/test-clients/src/main/resource/contract/contracts/AssociateDissociate/AssociateDissociate.bin index 29f6fb29fca0..39a785c450a4 100644 --- a/hedera-node/test-clients/src/main/resource/contract/contracts/AssociateDissociate/AssociateDissociate.bin +++ b/hedera-node/test-clients/src/main/resource/contract/contracts/AssociateDissociate/AssociateDissociate.bin @@ -1 +1 @@ -608060405234801561001057600080fd5b50610c21806100206000396000f3fe608060405234801561001057600080fd5b506004361061004c5760003560e01c80634753b51b146100515780637f6314d0146100b5578063e0d19a8b14610119578063fef03573146101f1575b600080fd5b6100b36004803603604081101561006757600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff1690602001909291905050506102c9565b005b610117600480360360408110156100cb57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050610355565b005b6101ef6004803603604081101561012f57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff1690602001909291908035906020019064010000000081111561016c57600080fd5b82018360208201111561017e57600080fd5b803590602001918460208302840111640100000000831117156101a057600080fd5b919080806020026020016040519081016040528093929190818152602001838360200280828437600081840152601f19601f8201169050808301925050505050505091929192905050506103e1565b005b6102c76004803603604081101561020757600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff1690602001909291908035906020019064010000000081111561024457600080fd5b82018360208201111561025657600080fd5b8035906020019184602083028401116401000000008311171561027857600080fd5b919080806020026020016040519081016040528093929190818152602001838360200280828437600081840152601f19601f82011690508083019250505050505050919291929050505061046d565b005b60006102d583836104f9565b9050601660030b8114610350576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260118152602001807f446973736f6369617465204661696c656400000000000000000000000000000081525060200191505060405180910390fd5b505050565b600061036183836106a0565b9050601660030b81146103dc576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260108152602001807f4173736f6369617465204661696c65640000000000000000000000000000000081525060200191505060405180910390fd5b505050565b60006103ed8383610847565b9050601660030b8114610468576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252601c8152602001807f4d756c7469706c65204173736f63696174696f6e73204661696c65640000000081525060200191505060405180910390fd5b505050565b60006104798383610a19565b9050601660030b81146104f4576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252601d8152602001807f4d756c7469706c6520446973736f63696174696f6e73204661696c656400000081525060200191505060405180910390fd5b505050565b600080606061016773ffffffffffffffffffffffffffffffffffffffff1663099794e860e01b8686604051602401808373ffffffffffffffffffffffffffffffffffffffff1681526020018273ffffffffffffffffffffffffffffffffffffffff16815260200192505050604051602081830303815290604052907bffffffffffffffffffffffffffff
ffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040518082805190602001908083835b602083106105f457805182526020820191506020810190506020830392506105d1565b6001836020036101000a0380198251168184511680821785525050505050509050019150506000604051808303816000865af19150503d8060008114610656576040519150601f19603f3d011682016040523d82523d6000602084013e61065b565b606091505b50915091508161066c576015610693565b80806020019051602081101561068157600080fd5b81019080805190602001909291905050505b60030b9250505092915050565b600080606061016773ffffffffffffffffffffffffffffffffffffffff166349146bde60e01b8686604051602401808373ffffffffffffffffffffffffffffffffffffffff1681526020018273ffffffffffffffffffffffffffffffffffffffff16815260200192505050604051602081830303815290604052907bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040518082805190602001908083835b6020831061079b5780518252602082019150602081019050602083039250610778565b6001836020036101000a0380198251168184511680821785525050505050509050019150506000604051808303816000865af19150503d80600081146107fd576040519150601f19603f3d011682016040523d82523d6000602084013e610802565b606091505b50915091508161081357601561083a565b80806020019051602081101561082857600080fd5b81019080805190602001909291905050505b60030b9250505092915050565b600080606061016773ffffffffffffffffffffffffffffffffffffffff16632e63879b60e01b8686604051602401808373ffffffffffffffffffffffffffffffffffffffff16815260200180602001828103825283818151815260200191508051906020019060200280838360005b838110156108d15780820151818401526020810190506108b6565b505050509050019350505050604051602081830303815290604052907bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040518082805190602001908083835b6020831061096d578051825260208201915060208101905060208303925061094a565b6001836020036101000a0380198251168184511680821785525050505050509050019150506000604051808303816000865af19150503d80600081146109cf576040519150601f19603f3d011682016040523d82523d6000602084013e6109d4565b606091505b5091509150816109e5576015610a0c565b8080602001905160208110156109fa57600080fd5b81019080805190602001909291905050505b60030b9250505092915050565b600080606061016773ffffffffffffffffffffffffffffffffffffffff166378b6391860e01b8686604051602401808373ffffffffffffffffffffffffffffffffffffffff16815260200180602001828103825283818151815260200191508051906020019060200280838360005b83811015610aa3578082015181840152602081019050610a88565b505050509050019350505050604051602081830303815290604052907bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040518082805190602001908083835b60208310610b3f5780518252602082019150602081019050602083039250610b1c565b6001836020036101000a0380198251168184511680821785525050505050509050019150506000604051808303816000865af19150503d8060008114610ba1576040519150601f19603f3d011682016040523d82523d6000602084013e610ba6565b606091505b509150915081610bb7576015610bde565b808060200190516020811015610bcc57600080fd5b81019080805190602001909291905050505b60030b925050509291505056fea26469706673582212207e94e91430f9dfed904b983c08fc96c3f98cb6703571771233534e51051a5fee64736f6c634300060c0033 \ No newline at end of file 
+0x608060405234801561001057600080fd5b50610791806100206000396000f3fe608060405234801561001057600080fd5b50600436106100675760003560e01c80637f6314d0116100505780637f6314d0146100a6578063e0d19a8b146100b9578063fef03573146100cc57600080fd5b80634753b51b1461006c5780637c41ad2c14610081575b600080fd5b61007f61007a366004610546565b6100df565b005b61009461008f366004610579565b610147565b60405190815260200160405180910390f35b61007f6100b4366004610546565b610240565b61007f6100c73660046105ca565b61029e565b61007f6100da3660046105ca565b6102fc565b60006100eb838361035a565b9050601681146101425760405162461bcd60e51b815260206004820152601160248201527f446973736f6369617465204661696c656400000000000000000000000000000060448201526064015b60405180910390fd5b505050565b604080516001600160a01b03831660248083019190915282518083039091018152604490910182526020810180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167f7c41ad2c00000000000000000000000000000000000000000000000000000000179052905160009182918291610167916101ce91906106a2565b6000604051808303816000865af19150503d806000811461020b576040519150601f19603f3d011682016040523d82523d6000602084013e610210565b606091505b509150915081610221576015610235565b8080602001905181019061023591906106dd565b60030b949350505050565b600061024c8383610483565b9050601681146101425760405162461bcd60e51b815260206004820152601060248201527f4173736f6369617465204661696c6564000000000000000000000000000000006044820152606401610139565b60006102aa83836104d2565b9050601681146101425760405162461bcd60e51b815260206004820152601c60248201527f4d756c7469706c65204173736f63696174696f6e73204661696c6564000000006044820152606401610139565b600061030883836104fe565b9050601681146101425760405162461bcd60e51b815260206004820152601d60248201527f4d756c7469706c6520446973736f63696174696f6e73204661696c65640000006044820152606401610139565b6040516001600160a01b0383811660248301528216604482015260009081908190610167907f099794e800000000000000000000000000000000000000000000000000000000906064015b60408051601f198184030181529181526020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167fffffffff0000000000000000000000000000000000000000000000000000000090941693909317909252905161041091906106a2565b6000604051808303816000865af19150503d806000811461044d576040519150601f19603f3d011682016040523d82523d6000602084013e610452565b606091505b509150915081610463576015610477565b8080602001905181019061047791906106dd565b60030b95945050505050565b6040516001600160a01b0383811660248301528216604482015260009081908190610167907f49146bde00000000000000000000000000000000000000000000000000000000906064016103a5565b60008060006101676001600160a01b0316632e63879b60e01b86866040516024016103a5929190610700565b60008060006101676001600160a01b03166378b6391860e01b86866040516024016103a5929190610700565b80356001600160a01b038116811461054157600080fd5b919050565b6000806040838503121561055957600080fd5b6105628361052a565b91506105706020840161052a565b90509250929050565b60006020828403121561058b57600080fd5b6105948261052a565b9392505050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b600080604083850312156105dd57600080fd5b6105e68361052a565b915060208084013567ffffffffffffffff8082111561060457600080fd5b818601915086601f83011261061857600080fd5b81358181111561062a5761062a61059b565b8060051b604051601f19603f8301168101818110858211171561064f5761064f61059b565b60405291825284820192508381018501918983111561066d57600080fd5b938501935b82851015610692576106838561052a565b84529385019392850192610672565b8096505050505050509250929050565b6000825160005b818110156106c357602081860181015185830152016106a9565b818111156106d25760008285
01525b509190910192915050565b6000602082840312156106ef57600080fd5b81518060030b811461059457600080fd5b6000604082016001600160a01b0380861684526020604081860152828651808552606087019150828801945060005b8181101561074d57855185168352948301949183019160010161072f565b50909897505050505050505056fea2646970667358221220789f3f5d595f8401d3bfc78bb33e77b4014f2ccd0ca3c3990b0707abf8b5470f64736f6c63430008090033 \ No newline at end of file diff --git a/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.bin b/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.bin index 39ad3497d1f1..1bd1a5b68fc6 100644 --- a/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.bin +++ b/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.bin @@ -1 +1 @@ -608060405234801561001057600080fd5b506040516110123803806110128339818101604052602081101561003357600080fd5b8101908080519060200190929190505050806000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555050610f7e806100946000396000f3fe608060405234801561001057600080fd5b50600436106100625760003560e01c80630d543f5f146100675780631f227fc3146100cb5780636f51648f1461012f578063e18a006f14610193578063e6bd9572146101f7578063f4b211681461025b575b600080fd5b6100c96004803603604081101561007d57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff1690602001909291905050506102bf565b005b61012d600480360360408110156100e157600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff1690602001909291905050506104d2565b005b6101916004803603604081101561014557600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff1690602001909291905050506106e5565b005b6101f5600480360360408110156101a957600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff1690602001909291905050506108f8565b005b6102596004803603604081101561020d57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050610b0b565b005b6102bd6004803603604081101561027157600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050610c56565b005b6000606060008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168484604051602401808373ffffffffffffffffffffffffffffffffffffffff1681526020018273ffffffffffffffffffffffffffffffffffffffff168152602001925050506040516020818303038152906040527f4753b51b000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040518082805190602001908083835b602083106103ef57805182526020820191506020810190506020830392506103cc565b6001836020036101000a038019825116818451168082178552505050505050905001915050600060405180830381855af49150503d806000811461044f576040519150601f19603f3d011682016040523d82523d6000602084013e610454565b606091505b5091509150816104cc576040517f08c379a000000000000000000000000000000000000000
00000000000000000081526004018080602001828103825260208152602001807f44656c656761746520646973736f63696174652063616c6c206661696c65642181525060200191505060405180910390fd5b50505050565b6000606060008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168484604051602401808373ffffffffffffffffffffffffffffffffffffffff1681526020018273ffffffffffffffffffffffffffffffffffffffff168152602001925050506040516020818303038152906040527f4753b51b000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040518082805190602001908083835b6020831061060257805182526020820191506020810190506020830392506105df565b6001836020036101000a038019825116818451168082178552505050505050905001915050600060405180830381855afa9150503d8060008114610662576040519150601f19603f3d011682016040523d82523d6000602084013e610667565b606091505b5091509150816106df576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252601e8152602001807f53746174696320646973736f63696174652063616c6c206661696c656421000081525060200191505060405180910390fd5b50505050565b6000606060008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168484604051602401808373ffffffffffffffffffffffffffffffffffffffff1681526020018273ffffffffffffffffffffffffffffffffffffffff168152602001925050506040516020818303038152906040527f7f6314d0000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040518082805190602001908083835b6020831061081557805182526020820191506020810190506020830392506107f2565b6001836020036101000a038019825116818451168082178552505050505050905001915050600060405180830381855afa9150503d8060008114610875576040519150601f19603f3d011682016040523d82523d6000602084013e61087a565b606091505b5091509150816108f2576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252601d8152602001807f537461746963206173736f63696174652063616c6c206661696c65642100000081525060200191505060405180910390fd5b50505050565b6000606060008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168484604051602401808373ffffffffffffffffffffffffffffffffffffffff1681526020018273ffffffffffffffffffffffffffffffffffffffff168152602001925050506040516020818303038152906040527f7f6314d0000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040518082805190602001908083835b60208310610a285780518252602082019150602081019050602083039250610a05565b6001836020036101000a038019825116818451168082178552505050505050905001915050600060405180830381855af49150503d8060008114610a88576040519150601f19603f3d011682016040523d82523d6000602084013e610a8d565b606091505b509150915081610b05576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252601f8152602001807f44656c6567617465206173736f63696174652063616c6c206661696c6564210081525060200191505060405180910390fd5b50505050565b6000610b178383610da1565b9050601660030b8114610b92576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260118152602001807f446973736f63696174652
04661696c656400000000000000000000000000000081525060200191505060405180910390fd5b60008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16637f6314d084846040518363ffffffff1660e01b8152600401808373ffffffffffffffffffffffffffffffffffffffff1681526020018273ffffffffffffffffffffffffffffffffffffffff16815260200192505050600060405180830381600087803b158015610c3957600080fd5b505af1158015610c4d573d6000803e3d6000fd5b50505050505050565b60008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16637f6314d083836040518363ffffffff1660e01b8152600401808373ffffffffffffffffffffffffffffffffffffffff1681526020018273ffffffffffffffffffffffffffffffffffffffff16815260200192505050600060405180830381600087803b158015610cfd57600080fd5b505af1158015610d11573d6000803e3d6000fd5b505050506000610d218383610da1565b9050601660030b8114610d9c576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260118152602001807f446973736f6369617465204661696c656400000000000000000000000000000081525060200191505060405180910390fd5b505050565b600080606061016773ffffffffffffffffffffffffffffffffffffffff1663099794e860e01b8686604051602401808373ffffffffffffffffffffffffffffffffffffffff1681526020018273ffffffffffffffffffffffffffffffffffffffff16815260200192505050604051602081830303815290604052907bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040518082805190602001908083835b60208310610e9c5780518252602082019150602081019050602083039250610e79565b6001836020036101000a0380198251168184511680821785525050505050509050019150506000604051808303816000865af19150503d8060008114610efe576040519150601f19603f3d011682016040523d82523d6000602084013e610f03565b606091505b509150915081610f14576015610f3b565b808060200190516020811015610f2957600080fd5b81019080805190602001909291905050505b60030b925050509291505056fea26469706673582212203911c66cfc0daa049aea4df10ac7d742c7c14d89df04ba665b21e061b0c9080464736f6c634300060c0033 \ No newline at end of file 
+0x608060405234801561001057600080fd5b50604051610a60380380610a6083398101604081905261002f91610054565b600080546001600160a01b0319166001600160a01b0392909216919091179055610084565b60006020828403121561006657600080fd5b81516001600160a01b038116811461007d57600080fd5b9392505050565b6109cd806100936000396000f3fe608060405234801561001057600080fd5b50600436106100885760003560e01c80637c41ad2c1161005b5780637c41ad2c146100db578063e18a006f14610100578063e6bd957214610113578063f4b211681461012657600080fd5b80630d543f5f1461008d5780631f227fc3146100a25780632194f6eb146100b55780636f51648f146100c8575b600080fd5b6100a061009b3660046108e4565b610139565b005b6100a06100b03660046108e4565b610231565b6100a06100c33660046108e4565b61031e565b6100a06100d63660046108e4565b610389565b6100ee6100e9366004610917565b610476565b60405190815260200160405180910390f35b6100a061010e3660046108e4565b61055a565b6100a06101213660046108e4565b610647565b6100a06101343660046108e4565b610711565b600080546040516001600160a01b03858116602483015284811660448301528392169060640160408051601f198184030181529181526020820180516001600160e01b0316634753b51b60e01b179052516101949190610939565b600060405180830381855af49150503d80600081146101cf576040519150601f19603f3d011682016040523d82523d6000602084013e6101d4565b606091505b50915091508161022b5760405162461bcd60e51b815260206004820181905260248201527f44656c656761746520646973736f63696174652063616c6c206661696c65642160448201526064015b60405180910390fd5b50505050565b600080546040516001600160a01b03858116602483015284811660448301528392169060640160408051601f198184030181529181526020820180516001600160e01b0316634753b51b60e01b1790525161028c9190610939565b600060405180830381855afa9150503d80600081146102c7576040519150601f19603f3d011682016040523d82523d6000602084013e6102cc565b606091505b50915091508161022b5760405162461bcd60e51b815260206004820152601e60248201527f53746174696320646973736f63696174652063616c6c206661696c65642100006044820152606401610222565b6000546040516307f6314d60e41b81526001600160a01b038481166004830152838116602483015290911690637f6314d090604401600060405180830381600087803b15801561036d57600080fd5b505af1158015610381573d6000803e3d6000fd5b505050505050565b600080546040516001600160a01b03858116602483015284811660448301528392169060640160408051601f198184030181529181526020820180516001600160e01b03166307f6314d60e41b179052516103e49190610939565b600060405180830381855afa9150503d806000811461041f576040519150601f19603f3d011682016040523d82523d6000602084013e610424565b606091505b50915091508161022b5760405162461bcd60e51b815260206004820152601d60248201527f537461746963206173736f63696174652063616c6c206661696c6564210000006044820152606401610222565b604080516001600160a01b03831660248083019190915282518083039091018152604490910182526020810180516001600160e01b03167f7c41ad2c00000000000000000000000000000000000000000000000000000000179052905160009182918291610167916104e89190610939565b6000604051808303816000865af19150503d8060008114610525576040519150601f19603f3d011682016040523d82523d6000602084013e61052a565b606091505b50915091508161053b57601561054f565b8080602001905181019061054f9190610974565b60030b949350505050565b600080546040516001600160a01b03858116602483015284811660448301528392169060640160408051601f198184030181529181526020820180516001600160e01b03166307f6314d60e41b179052516105b59190610939565b600060405180830381855af49150503d80600081146105f0576040519150601f19603f3d011682016040523d82523d6000602084013e6105f5565b606091505b50915091508161022b5760405162461bcd60e51b815260206004820152601f60248201527f44656c6567617465206173736f63696174652063616c6c206661696c656421006044820152606401610222565b600061065383836107db565b9050601681146106a5
5760405162461bcd60e51b815260206004820152601160248201527f446973736f6369617465204661696c65640000000000000000000000000000006044820152606401610222565b6000546040516307f6314d60e41b81526001600160a01b038581166004830152848116602483015290911690637f6314d090604401600060405180830381600087803b1580156106f457600080fd5b505af1158015610708573d6000803e3d6000fd5b50505050505050565b6000546040516307f6314d60e41b81526001600160a01b038481166004830152838116602483015290911690637f6314d090604401600060405180830381600087803b15801561076057600080fd5b505af1158015610774573d6000803e3d6000fd5b50505050600061078483836107db565b9050601681146107d65760405162461bcd60e51b815260206004820152601160248201527f446973736f6369617465204661696c65640000000000000000000000000000006044820152606401610222565b505050565b604080516001600160a01b038481166024830152831660448083019190915282518083039091018152606490910182526020810180516001600160e01b03167f099794e800000000000000000000000000000000000000000000000000000000179052905160009182918291610167916108559190610939565b6000604051808303816000865af19150503d8060008114610892576040519150601f19603f3d011682016040523d82523d6000602084013e610897565b606091505b5091509150816108a85760156108bc565b808060200190518101906108bc9190610974565b60030b95945050505050565b80356001600160a01b03811681146108df57600080fd5b919050565b600080604083850312156108f757600080fd5b610900836108c8565b915061090e602084016108c8565b90509250929050565b60006020828403121561092957600080fd5b610932826108c8565b9392505050565b6000825160005b8181101561095a5760208186018101518583015201610940565b81811115610969576000828501525b509190910192915050565b60006020828403121561098657600080fd5b81518060030b811461093257600080fdfea264697066735822122057db8e6eb936b95311e9245edf7471e4d65a99a6474895369462f9898b103cfb64736f6c63430008090033 \ No newline at end of file diff --git a/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.json b/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.json index a1b3920a5b06..afd25b10aaeb 100644 --- a/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.json +++ b/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.json @@ -46,6 +46,24 @@ "stateMutability": "nonpayable", "type": "function" }, + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "address", + "name": "tokenAddress", + "type": "address" + } + ], + "name": "associateInternalContractCall", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, { "inputs": [ { @@ -117,5 +135,24 @@ "outputs": [], "stateMutability": "view", "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "token", + "type": "address" + } + ], + "name": "pauseToken", + "outputs": [ + { + "internalType": "int256", + "name": "responseCode", + "type": "int256" + } + ], + "stateMutability": "nonpayable", + "type": "function" } -] \ No newline at end of file +] diff --git a/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.sol b/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.sol index 39efebbc4caa..e10038a351db 100644 --- a/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.sol +++ 
b/hedera-node/test-clients/src/main/resource/contract/contracts/NestedAssociateDissociate/NestedAssociateDissociate.sol @@ -3,7 +3,7 @@ pragma solidity ^0.6.12; import "./HederaTokenService.sol"; -contract NestedAssociateDissociateContract is HederaTokenService { +contract NestedAssociateDissociate is HederaTokenService { AssociateDissociateContract associateDissociateContract; @@ -19,6 +19,10 @@ contract NestedAssociateDissociateContract is HederaTokenService { } } + function associateInternalContractCall(address sender, address tokenAddress) external { + associateDissociateContract.tokenAssociate(sender, tokenAddress); + } + function dissociateAssociateContractCall(address sender, address tokenAddress) external { int response = HederaTokenService.dissociateToken(sender, tokenAddress); if (response != HederaResponseCodes.SUCCESS) { diff --git a/platform-sdk/swirlds-logging/src/test/java/com/swirlds/logging/test/fixtures/internal/FilteredLoggingMirrorTest.java b/platform-sdk/swirlds-logging/src/test/java/com/swirlds/logging/test/fixtures/internal/FilteredLoggingMirrorTest.java new file mode 100644 index 000000000000..01812c983eff --- /dev/null +++ b/platform-sdk/swirlds-logging/src/test/java/com/swirlds/logging/test/fixtures/internal/FilteredLoggingMirrorTest.java @@ -0,0 +1,217 @@ +/* + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.swirlds.logging.test.fixtures.internal; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.swirlds.base.context.internal.ThreadLocalContext; +import com.swirlds.base.test.fixtures.concurrent.TestExecutor; +import com.swirlds.base.test.fixtures.concurrent.WithTestExecutor; +import com.swirlds.logging.api.Level; +import com.swirlds.logging.api.Logger; +import com.swirlds.logging.api.Loggers; +import com.swirlds.logging.test.fixtures.LoggingMirror; +import com.swirlds.logging.test.fixtures.WithLoggingMirror; +import jakarta.inject.Inject; +import java.util.UUID; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +@WithTestExecutor +@WithLoggingMirror +class FilteredLoggingMirrorTest { + + private final Logger logger = Loggers.getLogger(FilteredLoggingMirrorTest.class); + private static final String KEY = UUID.randomUUID().toString(); + private static final String VALUE = "VALUE"; + private static final String KEY2 = UUID.randomUUID().toString(); + private static final String TEST_THREAD = UUID.randomUUID().toString(); + private static final String VALUE2 = "OTHER VALUE"; + + @Inject + private LoggingMirror loggingMirror; + + @Inject + private TestExecutor executor; + + @Test + @DisplayName("filtering By Context Should Return Log Statements Done With right Context") + void testFilterByContext() { + executor.executeAndWait(this::doLog); + assertThat(loggingMirror.filterByContext(KEY, VALUE).getEventCount()).isEqualTo(7); + assertThat(loggingMirror + .filter(logEvent -> logEvent.context().containsKey(KEY)) + .getEventCount()) + .isEqualTo(7); + } + + @Test + @DisplayName("filtering By Context Nesting Conditions Should Return Log Statements Done With right Context") + void testFilterWithNestingByContext() { + executor.executeAndWait(this::doLog); + assertThat(loggingMirror + .filter(logEvent -> logEvent.context().containsKey(KEY)) + .filter(logEvent -> logEvent.context().containsKey(KEY2)) + .getEventCount()) + .isEqualTo(1); + assertThat(loggingMirror + .filterByContext(KEY, VALUE) + .filterByContext(KEY2, VALUE2) + .getEventCount()) + .isEqualTo(1); + } + + @Test + @DisplayName("filtering By Logger Should Return Log Statements Done With right Logger") + void testFilterByLogger() { + executor.executeAndWait(this::doLog); + assertThat(loggingMirror.filterByLogger(FilteredLoggingMirrorTest.class).getEventCount()) + .isEqualTo(19); + assertThat(loggingMirror.filterByLogger("AnotherName").getEventCount()).isEqualTo(0); + } + + @Test + @DisplayName("filtering By Thread Should Return Log Statements Done With right Thread") + void testFilterByThread() { + executor.executeAndWait(() -> { + Thread.currentThread().setName(TEST_THREAD); + logger.trace("This is a {} level message", Level.TRACE); + logger.debug("This is a {} level message", Level.DEBUG); + }); + assertThat(loggingMirror.filterByThread(TEST_THREAD).getEventCount()).isEqualTo(2); + + assertThat(loggingMirror + .filterByThread(TEST_THREAD) + .filterByLevel(Level.TRACE) + .getEventCount()) + .isEqualTo(1); + + assertThat(loggingMirror + .filterByThread(TEST_THREAD) + .filterByLevel(Level.DEBUG) + .getEventCount()) + .isEqualTo(1); + + assertThat(loggingMirror + .filterByThread(TEST_THREAD) + .filterByLevel(Level.INFO) + .getEventCount()) + .isEqualTo(0); + + assertThat(loggingMirror.filterByCurrentThread().getEventCount()).isEqualTo(0); + } + + @Test + @DisplayName("filtering By Level Should Return Log Statements Done With right Level") + void testFilterByLevel() { + 
executor.executeAndWait(this::doLog); + assertThat(loggingMirror.filterByLevel(Level.OFF).getEventCount()).isEqualTo(1); + + assertThat(loggingMirror.filterByLevel(Level.TRACE).getEventCount()).isEqualTo(3); + + assertThat(loggingMirror.filterByLevel(Level.DEBUG).getEventCount()).isEqualTo(3); + + assertThat(loggingMirror.filterByLevel(Level.ERROR).getEventCount()).isEqualTo(3); + + assertThat(loggingMirror.filterByLevel(Level.WARN).getEventCount()).isEqualTo(3); + + assertThat(loggingMirror.filterByLevel(Level.INFO).getEventCount()).isEqualTo(6); + + assertThat(loggingMirror + .filterByLevel(Level.INFO) + .filterByContext(KEY, VALUE) + .getEventCount()) + .isEqualTo(3); + + assertThat(loggingMirror + .filterByLevel(Level.INFO) + .filterByContext(KEY, VALUE) + .filterByContext(KEY2, VALUE2) + .getEventCount()) + .isEqualTo(1); + } + + @Test + @DisplayName("filtering Above Level Should Return Log Statements Done With right Level") + void testFilterAboveLevel() { + executor.executeAndWait(() -> { + Thread.currentThread().setName(TEST_THREAD); + logger.error("This is a {} level message", Level.ERROR); + logger.warn("This is a {} level message", Level.WARN); + logger.info("This is a {} level message", Level.INFO); + logger.debug("This is a {} level message", Level.DEBUG); + logger.trace("This is a {} level message", Level.TRACE); + }); + assertThat(loggingMirror.filterAboveLevel(Level.OFF).getEventCount()).isEqualTo(0); + assertThat(loggingMirror.filterAboveLevel(Level.ERROR).getEventCount()).isEqualTo(1); + assertThat(loggingMirror.filterAboveLevel(Level.WARN).getEventCount()).isEqualTo(2); + assertThat(loggingMirror.filterAboveLevel(Level.INFO).getEventCount()).isEqualTo(3); + assertThat(loggingMirror.filterAboveLevel(Level.DEBUG).getEventCount()).isEqualTo(4); + assertThat(loggingMirror.filterAboveLevel(Level.TRACE).getEventCount()).isEqualTo(5); + assertThat(loggingMirror + .filterAboveLevel(Level.TRACE) + .filterByThread("WRONG-NAME") + .getEventCount()) + .isEqualTo(0); + assertThat(loggingMirror + .filterAboveLevel(Level.TRACE) + .filterByThread(TEST_THREAD) + .getEventCount()) + .isEqualTo(5); + } + + private void doLog() { + + logger.trace("This is a {} level message", Level.TRACE); + logger.debug("This is a {} level message", Level.DEBUG); + logger.info("This is a {} level message", Level.INFO); + logger.error("This is a {} level message", Level.ERROR); + logger.warn("This is a {} level message", Level.WARN); + logger.log(Level.OFF, "This is an off level"); + + final Logger testMarker = logger.withMarker("TEST_MARKER"); + testMarker.info("This is a {} level message", Level.INFO); + + try (final AutoCloseable closable = ThreadLocalContext.getInstance().add(KEY, VALUE)) { + logger.trace("This is a {} level message with context", Level.TRACE); + logger.debug("This is a {} level message with context", Level.DEBUG); + logger.info("This is a {} level message with context", Level.INFO); + logger.error("This is a {} level message with context", Level.ERROR); + logger.warn("This is a {} level message with context", Level.WARN); + testMarker.info("This is a {} level message with context and marker", Level.WARN); + } catch (Exception e) { + throw new RuntimeException(e); + } + + try (final AutoCloseable closable = ThreadLocalContext.getInstance().add(KEY2, VALUE2)) { + logger.trace("This is a {} level message with 2nd context", Level.TRACE); + logger.debug("This is a {} level message with 2nd context", Level.DEBUG); + logger.info("This is a {} level message with 2nd context", Level.INFO); + 
logger.error("This is a {} level message with 2nd context", Level.ERROR); + logger.warn("This is a {} level message with 2nd context", Level.WARN); + } catch (Exception e) { + throw new RuntimeException(e); + } + + try (final AutoCloseable closable = ThreadLocalContext.getInstance().add(KEY, VALUE); + final AutoCloseable closable2 = ThreadLocalContext.getInstance().add(KEY2, VALUE2)) { + testMarker.info("This is a {} level message with context, 2nd context and marker", Level.INFO); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} diff --git a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/LoggingMirror.java b/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/LoggingMirror.java index f9e5e1d86a1d..0d15cf5507d1 100644 --- a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/LoggingMirror.java +++ b/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/LoggingMirror.java @@ -20,6 +20,8 @@ import com.swirlds.logging.api.extensions.event.LogEvent; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; +import java.util.Objects; +import java.util.function.Predicate; /** * A mirror of the logging system that can be used to check the logging events that were generated during a test. A @@ -31,21 +33,39 @@ public interface LoggingMirror extends AutoCloseable { /** - * Returns the number of log events that were generated during a test. + * Returns a mirror that only contains log events were the logEventPredicate evaluates to true. * - * @return the number of log events that were generated during a test + * @param logEventPredicate the level to filter by + * @return a mirror that only contains log events with the given level */ - int getEventCount(); + @NonNull + LoggingMirror filter(@NonNull Predicate logEventPredicate); - @Override - default void close() throws Exception { - dispose(); + /** + * Returns a mirror that only contains log events with the given logger name (based on the class name). + * + * @param clazz the class to filter by + * @return a mirror that only contains log events with the given logger name + */ + default LoggingMirror filterByLogger(@NonNull final Class clazz) { + return filterByLogger(clazz.getName()); } /** - * Clears the mirror and disposes it. This method is automatically called before and after a test. + * Returns a list of all log events that were generated during a test. + * + * @return a list of all log events that were generated during a test + */ + List getEvents(); + + /** + * Returns the number of log events that were generated during a test. + * + * @return the number of log events that were generated during a test */ - void dispose(); + default int getEventCount() { + return getEvents().size(); + } /** * Returns a mirror that only contains log events with the given level. @@ -53,24 +73,41 @@ default void close() throws Exception { * @param level the level to filter by * @return a mirror that only contains log events with the given level */ - LoggingMirror filterByLevel(@NonNull final Level level); + @NonNull + default LoggingMirror filterByLevel(@NonNull final Level level) { + Objects.requireNonNull(level, "level must not be null"); + Predicate filter = event -> event.level() == level; + return filter(filter); + } /** - * Returns a mirror that only contains log events with the given context. + * Returns a mirror that only contains log events above the given level. 
* - * @param key the key of the context - * @param value the value of the context - * @return a mirror that only contains log events with the given context + * @param level the level to filter by + * @return a mirror that only contains log events with the given level */ - LoggingMirror filterByContext(@NonNull final String key, @NonNull final String value); + @NonNull + default LoggingMirror filterAboveLevel(@NonNull final Level level) { + Objects.requireNonNull(level, "level must not be null"); + final Predicate filter = + event -> level.ordinal() >= event.level().ordinal(); + return filter(filter); + } /** - * Returns a mirror that only contains log events with the current thread. + * Returns a mirror that only contains log events with the given context. * - * @return a mirror that only contains log events with the current thread + * @param key the key of the context + * @param value the value of the context + * @return a mirror that only contains log events with the given context */ - default LoggingMirror filterByCurrentThread() { - return filterByThread(Thread.currentThread().getName()); + @NonNull + default LoggingMirror filterByContext(@NonNull final String key, @NonNull final String value) { + Objects.requireNonNull(key, "key must not be null"); + Objects.requireNonNull(value, "value must not be null"); + final Predicate filter = event -> + event.context().containsKey(key) && event.context().get(key).equals(value); + return filter(filter); } /** @@ -79,16 +116,21 @@ default LoggingMirror filterByCurrentThread() { * @param threadName the name of the thread * @return a mirror that only contains log events with the given thread */ - LoggingMirror filterByThread(@NonNull final String threadName); + @NonNull + default LoggingMirror filterByThread(@NonNull final String threadName) { + Objects.requireNonNull(threadName, "threadName must not be null"); + final Predicate filter = event -> Objects.equals(event.threadName(), threadName); + return filter(filter); + } /** - * Returns a mirror that only contains log events with the given logger name (based on the class name). + * Returns a mirror that only contains log events with the current thread. * - * @param clazz the class to filter by - * @return a mirror that only contains log events with the given logger name + * @return a mirror that only contains log events with the current thread */ - default LoggingMirror filterByLogger(@NonNull final Class clazz) { - return filterByLogger(clazz.getName()); + @NonNull + default LoggingMirror filterByCurrentThread() { + return filterByThread(Thread.currentThread().getName()); } /** @@ -97,12 +139,10 @@ default LoggingMirror filterByLogger(@NonNull final Class clazz) { * @param loggerName the logger name to filter by * @return a mirror that only contains log events with the given logger name */ - LoggingMirror filterByLogger(@NonNull final String loggerName); - - /** - * Returns a list of all log events that were generated during a test. 
- * - * @return a list of all log events that were generated during a test - */ - List getEvents(); + @NonNull + default LoggingMirror filterByLogger(@NonNull final String loggerName) { + Objects.requireNonNull(loggerName, "loggerName must not be null"); + final Predicate filter = event -> event.loggerName().startsWith(loggerName); + return filter(filter); + } } diff --git a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/AbstractLoggingMirror.java b/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/AbstractLoggingMirror.java deleted file mode 100644 index 99b6a1e18258..000000000000 --- a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/AbstractLoggingMirror.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (C) 2023-2024 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.swirlds.logging.test.fixtures.internal; - -import com.swirlds.logging.api.Level; -import com.swirlds.logging.api.extensions.event.LogEvent; -import com.swirlds.logging.test.fixtures.LoggingMirror; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.Objects; -import java.util.function.Function; - -/** - * An abstract base class that implements the {@link LoggingMirror} interface - * and provides common filtering operations for log events. - */ -public abstract class AbstractLoggingMirror implements LoggingMirror { - - /** - * Creates a new instance of a logging mirror that filters log events based on - * the provided filter function. - * - * @param filter The filter function to apply to log events. - * @return A new logging mirror instance with the specified filter applied. 
- */ - protected abstract LoggingMirror filter(Function filter); - - /** - * {@inheritDoc} - */ - @Override - public int getEventCount() { - return getEvents().size(); - } - - /** - * {@inheritDoc} - */ - @Override - @NonNull - public LoggingMirror filterByLevel(@NonNull final Level level) { - Function filter = event -> event.level().ordinal() >= level.ordinal(); - return filter(filter); - } - - /** - * {@inheritDoc} - */ - @Override - @NonNull - public LoggingMirror filterByContext(@NonNull final String key, @NonNull final String value) { - Function filter = event -> - event.context().containsKey(key) && event.context().get(key).equals(value); - return filter(filter); - } - - /** - * {@inheritDoc} - */ - @Override - @NonNull - public LoggingMirror filterByThread(@NonNull final String threadName) { - Function filter = event -> Objects.equals(event.threadName(), threadName); - return filter(filter); - } - - /** - * {@inheritDoc} - */ - @Override - @NonNull - public LoggingMirror filterByLogger(@NonNull final String loggerName) { - Function filter = event -> event.loggerName().startsWith(loggerName); - return filter(filter); - } -} diff --git a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/FilteredLoggingMirror.java b/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/FilteredLoggingMirror.java index 8b1459e2cc96..08b06634d60e 100644 --- a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/FilteredLoggingMirror.java +++ b/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/FilteredLoggingMirror.java @@ -21,33 +21,34 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.util.AbstractList; import java.util.List; -import java.util.function.Function; +import java.util.Objects; +import java.util.function.Predicate; /** - * A concrete implementation of the {@link LoggingMirror} interface that represents a filtered view - * of log events based on a provided filter function. This class extends {@link AbstractLoggingMirror} - * and allows you to create a filtered mirror of log events. + * A concrete implementation of the {@link LoggingMirror} interface that represents a filtered view of log events based + * on a provided filter function. */ -public class FilteredLoggingMirror extends AbstractLoggingMirror { +public class FilteredLoggingMirror implements LoggingMirror { - private final Function filter; + private final Predicate filter; private final List list; - private final Runnable disposeAction; + private final Runnable closeAction; /** * Constructs a new {@code FilteredLoggingMirror} instance with the specified parameters. * * @param list The list of log events to filter. * @param filter The filter function used to select log events. - * @param disposeAction The action to be executed when this mirror is disposed. + * @param closeAction The action to be executed when this mirror is closed. 
+ * @throws NullPointerException if one of the arguments is {@code null} */ public FilteredLoggingMirror( @NonNull final List list, - @NonNull final Function filter, - @NonNull final Runnable disposeAction) { - this.list = list; - this.filter = filter; - this.disposeAction = disposeAction; + @NonNull final Predicate filter, + @NonNull final Runnable closeAction) { + this.list = Objects.requireNonNull(list, "list must not be null"); + this.filter = Objects.requireNonNull(filter, "filter must not be null"); + this.closeAction = Objects.requireNonNull(closeAction, "closeAction must not be null"); } /** @@ -56,7 +57,7 @@ public FilteredLoggingMirror( @Override @NonNull public List getEvents() { - return list.stream().filter(filter::apply).toList(); + return list.stream().filter(filter).toList(); } /** @@ -64,7 +65,8 @@ public List getEvents() { */ @Override @NonNull - protected LoggingMirror filter(@NonNull final Function filter) { + public LoggingMirror filter(@NonNull final Predicate filter) { + Objects.requireNonNull(filter, "filter must not be null"); final List liveList = new AbstractList<>() { @Override public int size() { @@ -76,14 +78,14 @@ public LogEvent get(int index) { return list.get(index); } }; - return new FilteredLoggingMirror(liveList, filter, disposeAction); + return new FilteredLoggingMirror(liveList, this.filter.and(filter), closeAction); } /** - * {@inheritDoc} + * Clears the mirror and disposes it. This method is automatically called before and after a test. */ @Override - public void dispose() { - disposeAction.run(); + public void close() { + closeAction.run(); } } diff --git a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/LoggerMirrorExtension.java b/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/LoggerMirrorExtension.java index b1b8f6de0d54..b87fac971d98 100644 --- a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/LoggerMirrorExtension.java +++ b/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/LoggerMirrorExtension.java @@ -31,8 +31,9 @@ public void interceptTestMethod( ReflectiveInvocationContext invocationContext, ExtensionContext extensionContext) throws Throwable { - try (final LoggingMirror loggingMirror = new LoggingMirrorImpl()) { + try (final LoggingMirrorImpl loggingMirror = new LoggingMirrorImpl()) { TestInjector.injectInTest(LoggingMirror.class, () -> loggingMirror, extensionContext); + TestInjector.injectInTest(LoggingMirrorImpl.class, () -> loggingMirror, extensionContext); invocation.proceed(); } } diff --git a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/LoggingMirrorImpl.java b/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/LoggingMirrorImpl.java index 2d9c431ca406..a72f4de61b54 100644 --- a/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/LoggingMirrorImpl.java +++ b/platform-sdk/swirlds-logging/src/testFixtures/java/com/swirlds/logging/test/fixtures/internal/LoggingMirrorImpl.java @@ -24,20 +24,19 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.function.Function; +import java.util.function.Predicate; /** - * A concrete implementation of the {@link LoggingMirror} interface that serves as a logging mirror - * and also implements the {@link LogHandler} 
interface to receive and store log events. It extends - * {@link AbstractLoggingMirror} to provide common filtering operations for log events. + * A concrete implementation of the {@link LoggingMirror} interface that serves as a logging mirror and also implements + * the {@link LogHandler} interface to receive and store log events. */ -public class LoggingMirrorImpl extends AbstractLoggingMirror implements LogHandler { +public class LoggingMirrorImpl implements LoggingMirror, LogHandler { private final List events = new CopyOnWriteArrayList<>(); /** - * Constructs a new {@code LoggingMirrorImpl} instance. It registers itself as a log handler - * with the default logging system to receive log events. + * Constructs a new {@code LoggingMirrorImpl} instance. It registers itself as a log handler with the default + * logging system to receive log events. */ public LoggingMirrorImpl() { DefaultLoggingSystem.getInstance().addHandler(this); @@ -52,19 +51,20 @@ public void accept(@NonNull final LogEvent event) { } /** - * {@inheritDoc} + * Clears the mirror and disposes it. This method is automatically called before and after a test. */ @Override - public void dispose() { + public void close() { DefaultLoggingSystem.getInstance().removeHandler(this); } /** * {@inheritDoc} */ + @NonNull @Override - protected LoggingMirror filter(@NonNull final Function filter) { - return new FilteredLoggingMirror(events, filter, this::dispose); + public LoggingMirror filter(@NonNull final Predicate filter) { + return new FilteredLoggingMirror(events, filter, this::close); } /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java index 8039dadb9090..8b2c9b0483ee 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java @@ -215,7 +215,7 @@ public class SwirldsPlatform implements Platform { * If a state was loaded from disk, this is the minimum generation non-ancient for that round. If starting from a * genesis state, this is 0. */ - private final long initialMinimumGenerationNonAncient; + private final long initialAncientThreshold; /** * The latest round to have reached consensus in the initial state @@ -750,11 +750,10 @@ public class SwirldsPlatform implements Platform { ancientMode)); if (startedFromGenesis) { - initialMinimumGenerationNonAncient = 0; + initialAncientThreshold = 0; startingRound = 0; } else { - initialMinimumGenerationNonAncient = - initialState.getState().getPlatformState().getMinimumGenerationNonAncient(); + initialAncientThreshold = initialState.getState().getPlatformState().getAncientThreshold(); startingRound = initialState.getRound(); latestImmutableState.setState(initialState.reserve("set latest immutable to initial state")); @@ -770,8 +769,8 @@ public class SwirldsPlatform implements Platform { // the non-expired event window will continue to expand until it reaches its full size. 
platformWiring.updateNonAncientEventWindow(new NonAncientEventWindow( initialState.getRound(), - initialMinimumGenerationNonAncient, - initialMinimumGenerationNonAncient, + initialAncientThreshold, + initialAncientThreshold, AncientMode.getAncientMode(platformContext))); platformWiring.getIssDetectorWiring().overridingState().put(initialState.reserve("initialize issDetector")); @@ -894,8 +893,8 @@ private void loadStateIntoConsensus(@NonNull final SignedState signedState) { // FUTURE WORK: this needs to be updated for birth round compatibility. final NonAncientEventWindow eventWindow = new NonAncientEventWindow( signedState.getRound(), - signedState.getState().getPlatformState().getMinimumGenerationNonAncient(), - signedState.getState().getPlatformState().getMinimumGenerationNonAncient(), + signedState.getState().getPlatformState().getAncientThreshold(), + signedState.getState().getPlatformState().getAncientThreshold(), ancientMode); shadowGraph.startWithEventWindow(eventWindow); @@ -963,8 +962,8 @@ private void loadReconnectState(final SignedState signedState) { platformWiring.updateNonAncientEventWindow(new NonAncientEventWindow( signedState.getRound(), - signedState.getMinRoundGeneration(), - signedState.getMinRoundGeneration(), + signedState.getState().getPlatformState().getAncientThreshold(), + signedState.getState().getPlatformState().getAncientThreshold(), ancientMode)); platformWiring.updateRunningHash(new RunningEventHashUpdate(signedState.getHashEventsCons(), true)); @@ -1055,12 +1054,12 @@ private void replayPreconsensusEvents() { // minimum generation non-ancient is reversed to a smaller value, so we skip it if (!emergencyRecoveryNeeded) { final IOIterator iterator = - initialPcesFiles.getEventIterator(initialMinimumGenerationNonAncient, startingRound); + initialPcesFiles.getEventIterator(initialAncientThreshold, startingRound); logger.info( STARTUP.getMarker(), "replaying preconsensus event stream starting at generation {}", - initialMinimumGenerationNonAncient); + initialAncientThreshold); platformWiring.getPcesReplayerIteratorInput().inject(iterator); } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/consensus/RoundCalculationUtils.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/consensus/RoundCalculationUtils.java index 182b72948e22..f84c390ef928 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/consensus/RoundCalculationUtils.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/consensus/RoundCalculationUtils.java @@ -16,7 +16,6 @@ package com.swirlds.platform.consensus; -import com.swirlds.platform.state.signed.SignedState; import com.swirlds.platform.system.events.EventConstants; import java.util.function.LongUnaryOperator; @@ -61,20 +60,4 @@ public static long getMinGenNonAncient( roundGenerationProvider.applyAsLong(getOldestNonAncientRound(roundsNonAncient, lastRoundDecided)), GraphGenerations.FIRST_GENERATION); } - - /** - * Returns the minimum generation below which all events are ancient for the round of the signed state - * - * @param roundsNonAncient - * the number of non-ancient rounds - * @param signedState - * the signed state that holds the minumum generation information - * @return minimum non-ancient generation - */ - public static long getMinGenNonAncient(final int roundsNonAncient, final SignedState signedState) { - return getMinGenNonAncient( - roundsNonAncient, - signedState.getRound(), - round -> 
signedState.getState().getPlatformState().getMinimumJudgeAncientIndicator(round)); - } } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/validation/InternalEventValidator.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/validation/InternalEventValidator.java index 5df0bb3d0bcb..20aa94612a3e 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/validation/InternalEventValidator.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/validation/InternalEventValidator.java @@ -18,7 +18,6 @@ import static com.swirlds.logging.legacy.LogMarker.EXCEPTION; import static com.swirlds.metrics.api.Metrics.PLATFORM_CATEGORY; -import static com.swirlds.platform.consensus.ConsensusConstants.ROUND_FIRST; import static com.swirlds.platform.consensus.ConsensusConstants.ROUND_NEGATIVE_INFINITY; import static com.swirlds.platform.system.events.EventConstants.FIRST_GENERATION; @@ -27,9 +26,7 @@ import com.swirlds.common.utility.throttle.RateLimitedLogger; import com.swirlds.metrics.api.LongAccumulator; import com.swirlds.platform.config.TransactionConfig; -import com.swirlds.platform.event.AncientMode; import com.swirlds.platform.event.GossipEvent; -import com.swirlds.platform.eventhandling.EventConfig; import com.swirlds.platform.gossip.IntakeEventCounter; import com.swirlds.platform.system.events.BaseEventHashedData; import com.swirlds.platform.system.events.EventDescriptor; @@ -82,8 +79,6 @@ public class InternalEventValidator { private final LongAccumulator invalidGenerationAccumulator; private final LongAccumulator invalidBirthRoundAccumulator; - private final AncientMode ancientMode; - /** * Constructor * @@ -154,11 +149,6 @@ public InternalEventValidator( .getOrCreate(new LongAccumulator.Config(PLATFORM_CATEGORY, "eventsWithInvalidBirthRound") .withDescription("Events with an invalid birth round") .withUnit("events")); - - this.ancientMode = platformContext - .getConfiguration() - .getConfigData(EventConfig.class) - .getAncientMode(); } /** @@ -231,14 +221,6 @@ private boolean areParentsInternallyConsistent(@NonNull final GossipEvent event) inconsistentSelfParentAccumulator.update(1); return false; } - if (selfParent.getBirthRound() < ROUND_FIRST) { - inconsistentSelfParentLogger.error( - EXCEPTION.getMarker(), - "Event %s has self parent with birth round less than the ROUND_FIRST. self-parent birth round: %s" - .formatted(event, selfParent.getBirthRound())); - inconsistentSelfParentAccumulator.update(1); - return false; - } } for (final EventDescriptor otherParent : hashedData.getOtherParents()) { @@ -250,14 +232,6 @@ private boolean areParentsInternallyConsistent(@NonNull final GossipEvent event) inconsistentOtherParentAccumulator.update(1); return false; } - if (otherParent.getBirthRound() < ROUND_FIRST) { - inconsistentOtherParentLogger.error( - EXCEPTION.getMarker(), - "Event %s has other parent with birth round less than the ROUND_FIRST. 
other-parent: %s" - .formatted(event, otherParent)); - inconsistentOtherParentAccumulator.update(1); - return false; - } } // only single node networks are allowed to have identical self-parent and other-parent hashes @@ -323,15 +297,6 @@ private boolean isEventGenerationValid(@NonNull final GossipEvent event) { private boolean isEventBirthRoundValid(@NonNull final GossipEvent event) { final long eventBirthRound = event.getDescriptor().getBirthRound(); - if (eventBirthRound < ROUND_FIRST) { - invalidBirthRoundLogger.error( - EXCEPTION.getMarker(), - "Event %s has an invalid birth round. Event birth round: %s, the min birth round is: %s" - .formatted(event, eventBirthRound, ROUND_FIRST)); - invalidBirthRoundAccumulator.update(1); - return false; - } - long maxParentBirthRound = ROUND_NEGATIVE_INFINITY; final EventDescriptor parent = event.getHashedData().getSelfParent(); if (parent != null) { diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java index bd0c8bf822be..c8b8aa2aebb7 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java @@ -243,7 +243,7 @@ public void shiftWindow(final long firstSequenceNumberInWindow) { @Override public void loadFromSignedState(final SignedState signedState) { - shiftWindow(signedState.getMinRoundGeneration()); + shiftWindow(signedState.getState().getPlatformState().getAncientThreshold()); } /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/PlatformState.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/PlatformState.java index b08cd564d84e..661c0a5a86a6 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/PlatformState.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/PlatformState.java @@ -22,7 +22,7 @@ import com.swirlds.common.merkle.MerkleLeaf; import com.swirlds.common.merkle.impl.PartialMerkleLeaf; import com.swirlds.platform.consensus.ConsensusSnapshot; -import com.swirlds.platform.consensus.RoundCalculationUtils; +import com.swirlds.platform.event.AncientMode; import com.swirlds.platform.system.SoftwareVersion; import com.swirlds.platform.system.address.AddressBook; import com.swirlds.platform.uptime.UptimeDataImpl; @@ -31,7 +31,6 @@ import java.io.IOException; import java.time.Instant; import java.util.List; -import java.util.NoSuchElementException; import java.util.Objects; /** @@ -334,53 +333,31 @@ public void setConsensusTimestamp(@NonNull final Instant consensusTimestamp) { } /** - * Get the minimum ancient indicator for all judges for each round. + * For the oldest non-ancient round, get the lowest ancient indicator out of all of those round's judges. This is + * the ancient threshold at the moment after this state's round reached consensus. All events with an ancient + * indicator that is greater than or equal to this value are non-ancient. All events with an ancient indicator less + * than this value are ancient. * - * @return minimum judge info list, or null if this is a genesis state - */ - @Nullable - public List getMinimumJudgeInfoList() { - return snapshot == null ? 
null : snapshot.getMinimumJudgeInfoList(); - } - - /** - * The minimum ancient indicator of famous witnesses (i.e. judges) for the round specified. This method only looks - * at non-ancient rounds contained within this state. - * - * @param round the round whose minimum judge ancient indicator will be returned - * @return the minimum judge ancient indicator for the round specified - * @throws NoSuchElementException if the judge information for this round is not contained withing this state - */ - public long getMinimumJudgeAncientIndicator(final long round) { - final List minimumJudgeInfo = getMinimumJudgeInfoList(); - if (minimumJudgeInfo == null) { - throw new IllegalStateException("No minimum judge info found in state for round " + round); - } - - for (final MinimumJudgeInfo info : minimumJudgeInfo) { - if (info.round() == round) { - return info.minimumJudgeAncientThreshold(); - } - } - throw new NoSuchElementException("No minimum judge info found for round: " + round); - } - - /** - * Return the round generation of the oldest round in this state + *
<p>
+ * When running in {@link AncientMode#GENERATION_THRESHOLD}, this value is the minimum generation non-ancient. When + * running in {@link AncientMode#BIRTH_ROUND_THRESHOLD}, this value is the minimum birth round non-ancient. + *
</p>
* - * @return the generation of the oldest round + * @return the ancient threshold after this round has reached consensus */ - public long getMinRoundGeneration() { + public long getAncientThreshold() { + if (snapshot == null) { + throw new IllegalStateException( + "No minimum judge info found in state for round " + round + ", snapshot is null"); + } - final List minimumJudgeInfo = getMinimumJudgeInfoList(); - if (minimumJudgeInfo == null) { - throw new IllegalStateException("No MinGen info found in state for round " + round); + final List minimumJudgeInfo = snapshot.getMinimumJudgeInfoList(); + if (minimumJudgeInfo.isEmpty()) { + throw new IllegalStateException( + "No minimum judge info found in state for round " + round + ", list is empty"); } - return getMinimumJudgeInfoList().stream() - .findFirst() - .orElseThrow(() -> new IllegalStateException("No MinGen info found in state")) - .minimumJudgeAncientThreshold(); + return minimumJudgeInfo.getFirst().minimumJudgeAncientThreshold(); } /** @@ -439,16 +416,6 @@ public int getRoundsNonAncient() { return roundsNonAncient; } - /** - * Gets the minimum generation of non-ancient events. - * - * @return the minimum generation of non-ancient events - */ - public long getMinimumGenerationNonAncient() { - return RoundCalculationUtils.getMinGenNonAncient( - roundsNonAncient, round, this::getMinimumJudgeAncientIndicator); - } - /** * @return the consensus snapshot for this round */ diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/State.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/State.java index 57f2cb49b06c..59088b79f21d 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/State.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/State.java @@ -256,8 +256,9 @@ public String getInfoString(final int hashDepth) { final PlatformState platformState = getPlatformState(); final Hash epochHash = platformState.getNextEpochHash(); final Hash hashEventsCons = platformState.getRunningEventHash(); - final List minimumJudgeInfo = platformState.getMinimumJudgeInfoList(); + final ConsensusSnapshot snapshot = platformState.getSnapshot(); + final List minimumJudgeInfo = snapshot == null ? 
null : snapshot.getMinimumJudgeInfoList(); final StringBuilder sb = new StringBuilder(); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SavedStateMetadata.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SavedStateMetadata.java index ea741ebc4de6..f44705fd9a4e 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SavedStateMetadata.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SavedStateMetadata.java @@ -188,7 +188,7 @@ public static SavedStateMetadata create( signedState.getConsensusTimestamp(), platformState.getRunningEventHash(), platformState.getRunningEventHash().toMnemonic(), - platformState.getMinimumGenerationNonAncient(), + platformState.getAncientThreshold(), convertToString(platformState.getCreationSoftwareVersion()), now, selfId, diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SavedStateMetadataField.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SavedStateMetadataField.java index 23a192433cd0..9b2fe4ba9008 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SavedStateMetadataField.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SavedStateMetadataField.java @@ -50,7 +50,8 @@ public enum SavedStateMetadataField { */ RUNNING_EVENT_HASH_MNEMONIC, /** - * The minimum generation of non-ancient events after this state reached consensus. + * The minimum generation of non-ancient events after this state reached consensus. Future work: this needs to be + * migrated once we have switched to {@link com.swirlds.platform.event.AncientMode#BIRTH_ROUND_THRESHOLD}. */ MINIMUM_GENERATION_NON_ANCIENT, /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedState.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedState.java index 46e78649db68..6520d6d6eeb1 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedState.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedState.java @@ -46,7 +46,6 @@ import java.time.Instant; import java.util.ArrayList; import java.util.List; -import java.util.NoSuchElementException; import java.util.Objects; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -467,27 +466,6 @@ public String toString() { return state.getPlatformState().getRunningEventHash(); } - /** - * The minimum generation of famous witnesses for the round specified. This method only looks at non-ancient rounds - * contained within this state. 
- * - * @param round the round whose minimum generation will be returned - * @return the minimum generation for the round specified - * @throws NoSuchElementException if the generation information for this round is not contained withing this state - */ - public long getMinGen(final long round) { - return getState().getPlatformState().getMinimumJudgeAncientIndicator(round); - } - - /** - * Return the round generation of the oldest round in this state - * - * @return the generation of the oldest round - */ - public long getMinRoundGeneration() { - return getState().getPlatformState().getMinRoundGeneration(); - } - /** * Check if this is a state that needs to be eventually written to disk. * diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java index 3904acef3738..daf74bfcd893 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java @@ -168,7 +168,7 @@ public static void writeSignedStateFilesToDirectory( platformContext, selfId, directory, - signedState.getState().getPlatformState().getMinimumGenerationNonAncient(), + signedState.getState().getPlatformState().getAncientThreshold(), signedState.getRound()); } } diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/consensus/RoundCalculationUtilsTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/consensus/RoundCalculationUtilsTest.java index 2fdd0fabcb76..f39c054b2404 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/consensus/RoundCalculationUtilsTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/consensus/RoundCalculationUtilsTest.java @@ -76,20 +76,6 @@ void getMinGenNonAncientFromSignedState() { final AtomicLong lastRoundDecided = new AtomicLong(); when(signedState.getRound()).thenAnswer(a -> lastRoundDecided.get()); - when(signedState.getMinGen(Mockito.anyLong())).thenAnswer(a -> map.get(a.getArgument(0, Long.class))); when(platformState.getRound()).thenAnswer(a -> lastRoundDecided.get()); - when(platformState.getMinimumJudgeAncientIndicator(Mockito.anyLong())) - .thenAnswer(a -> map.get(a.getArgument(0, Long.class))); - - lastRoundDecided.set(10); - Assertions.assertEquals( - 60, - RoundCalculationUtils.getMinGenNonAncient(5, signedState), - "if the oldest non-ancient round is 6, then the generation should 60"); - lastRoundDecided.set(5); - Assertions.assertEquals( - 10, - RoundCalculationUtils.getMinGenNonAncient(10, signedState), - "if no rounds are ancient yet, then the minGenNonAncient is the first round generation"); } } diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/event/validation/InternalEventValidatorTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/event/validation/InternalEventValidatorTests.java index 9179cbd90678..661254718acc 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/event/validation/InternalEventValidatorTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/event/validation/InternalEventValidatorTests.java @@ -18,7 +18,6 @@ import static com.swirlds.common.test.fixtures.RandomUtils.getRandomPrintSeed; 
import static com.swirlds.common.test.fixtures.RandomUtils.randomHash; -import static com.swirlds.platform.consensus.ConsensusConstants.ROUND_NEGATIVE_INFINITY; import static com.swirlds.platform.system.events.EventConstants.GENERATION_UNDEFINED; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; @@ -134,40 +133,6 @@ private static GossipEvent generateGoodEvent(@NonNull final Random random, final totalTransactionBytes); } - @Test - @DisplayName("Generation Threshold With Negative BirthRound should NOT validate") - void generationThresholdWithNegativeBirthRound() { - final EventDescriptor selfParent = new EventDescriptor(randomHash(random), new NodeId(0), 0, -1); - final EventDescriptor otherParent = new EventDescriptor(randomHash(random), new NodeId(0), 0, -1); - final EventDescriptor self = new EventDescriptor(randomHash(random), new NodeId(0), 1, -1); - final GossipEvent event = generateEvent(self, selfParent, otherParent, 1111); - - final IntakeEventCounter intakeEventCounter = mock(IntakeEventCounter.class); - doAnswer(invocation -> { - exitedIntakePipelineCount.incrementAndGet(); - return null; - }) - .when(intakeEventCounter) - .eventExitedIntakePipeline(any()); - - final PlatformContext platformContext = TestPlatformContextBuilder.create() - .withConfiguration(new TestConfigBuilder() - .withValue(EventConfig_.USE_BIRTH_ROUND_ANCIENT_THRESHOLD, false) - .getOrCreateConfig()) - .build(); - - final Time time = new FakeTime(); - - final InternalEventValidator multinodeValidator = - new InternalEventValidator(platformContext, time, false, intakeEventCounter); - final InternalEventValidator singleNodeValidator = - new InternalEventValidator(platformContext, time, true, intakeEventCounter); - - assertNull(multinodeValidator.validateEvent(event)); - assertNull(singleNodeValidator.validateEvent(event)); - assertEquals(2, exitedIntakePipelineCount.get()); - } - @Test @DisplayName("An event with null hashed data is invalid") void nullHashedData() { @@ -214,13 +179,6 @@ void inconsistentParents() { new EventDescriptor(randomHash(random), new NodeId(1), 6, 1), 1111); - // self parent has invalid birth round. - final GossipEvent invalidSelfParentBirthRound = generateEvent( - new EventDescriptor(randomHash(random), new NodeId(0), 7, 1), - new EventDescriptor(randomHash(random), new NodeId(0), 5, ROUND_NEGATIVE_INFINITY), - new EventDescriptor(randomHash(random), new NodeId(1), 6, 1), - 1111); - // other parent has invalid generation. final GossipEvent invalidOtherParentGeneration = generateEvent( new EventDescriptor(randomHash(random), new NodeId(0), 7, 1), @@ -228,24 +186,13 @@ void inconsistentParents() { new EventDescriptor(randomHash(random), new NodeId(1), GENERATION_UNDEFINED, 1), 1111); - // other parent has invalid birth round. 
- final GossipEvent invalidOtherParentBirthRound = generateEvent( - new EventDescriptor(randomHash(random), new NodeId(0), 7, 1), - new EventDescriptor(randomHash(random), new NodeId(0), 5, 1), - new EventDescriptor(randomHash(random), new NodeId(1), 6, ROUND_NEGATIVE_INFINITY), - 1111); - assertNull(multinodeValidator.validateEvent(invalidSelfParentGeneration)); - assertNull(multinodeValidator.validateEvent(invalidSelfParentBirthRound)); assertNull(multinodeValidator.validateEvent(invalidOtherParentGeneration)); - assertNull(multinodeValidator.validateEvent(invalidOtherParentBirthRound)); assertNull(singleNodeValidator.validateEvent(invalidSelfParentGeneration)); - assertNull(singleNodeValidator.validateEvent(invalidSelfParentBirthRound)); assertNull(singleNodeValidator.validateEvent(invalidOtherParentGeneration)); - assertNull(singleNodeValidator.validateEvent(invalidOtherParentBirthRound)); - assertEquals(8, exitedIntakePipelineCount.get()); + assertEquals(4, exitedIntakePipelineCount.get()); } @Test diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/ChatterCoreTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/ChatterCoreTests.java index aae187afe733..40394f6678c6 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/ChatterCoreTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/ChatterCoreTests.java @@ -19,7 +19,6 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; import com.swirlds.base.time.Time; import com.swirlds.common.metrics.noop.NoOpMetrics; @@ -36,11 +35,13 @@ import java.util.LinkedList; import java.util.List; import java.util.Random; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; /** * Tests for {@link ChatterCore} */ +@Disabled public class ChatterCoreTests { /** @@ -112,7 +113,6 @@ void loadFromSignedStateTest() { private void loadSavedState(final ChatterCore chatterCore, final long stateMinGen) { final SignedState signedState = mock(SignedState.class); - when(signedState.getMinRoundGeneration()).thenReturn(stateMinGen); chatterCore.loadFromSignedState(signedState); }