From 0d7b1b438574072d452754271ab02a9d171d571a Mon Sep 17 00:00:00 2001 From: Nikolay Eskov Date: Thu, 14 Dec 2023 11:53:03 +0300 Subject: [PATCH 1/8] Fix 'transferTransactionById' in Ride. (#1274) Now function returns transaction only if transfer is regular or ethereum transfer transaction. Otherwise it returns 'unit'. --- pkg/api/metamask/service.go | 8 ++++---- pkg/proto/eth_transaction.go | 16 +++++++++++++-- pkg/proto/eth_tx_kind_resolver.go | 24 ++++++++++++---------- pkg/ride/functions_proto.go | 34 +++++++++++++++++++++++++++++++ 4 files changed, 65 insertions(+), 17 deletions(-) diff --git a/pkg/api/metamask/service.go b/pkg/api/metamask/service.go index aff97f5e3..3ac9ce272 100644 --- a/pkg/api/metamask/service.go +++ b/pkg/api/metamask/service.go @@ -172,14 +172,14 @@ func (s RPCService) Eth_EstimateGas(req estimateGasRequest) (string, error) { } } - txKind, err := proto.GuessEthereumTransactionKind(data) + txKind, err := proto.GuessEthereumTransactionKindType(data) if err != nil { return "", errors.Errorf("failed to guess ethereum tx kind, %v", err) } switch txKind { - case proto.EthereumTransferWavesKind: + case proto.EthereumTransferWavesKindType: return uint64ToHexString(proto.MinFee), nil - case proto.EthereumTransferAssetsKind: + case proto.EthereumTransferAssetsKindType: fee := proto.MinFee assetID := (*proto.AssetID)(req.To) @@ -191,7 +191,7 @@ func (s RPCService) Eth_EstimateGas(req estimateGasRequest) (string, error) { fee += proto.MinFeeScriptedAsset } return uint64ToHexString(uint64(fee)), nil - case proto.EthereumInvokeKind: + case proto.EthereumInvokeKindType: return uint64ToHexString(proto.MinFeeInvokeScript), nil default: return "", errors.Errorf("unexpected ethereum tx kind") diff --git a/pkg/proto/eth_transaction.go b/pkg/proto/eth_transaction.go index 1e3cfac03..ab956592e 100644 --- a/pkg/proto/eth_transaction.go +++ b/pkg/proto/eth_transaction.go @@ -86,17 +86,21 @@ type EthereumTxData interface { } type EthereumTransactionKind interface { + Type() EthereumTransactionKindType String() string DecodedData() *ethabi.DecodedCallData } -type EthereumTransferWavesTxKind struct { -} +type EthereumTransferWavesTxKind struct{} func NewEthereumTransferWavesTxKind() *EthereumTransferWavesTxKind { return &EthereumTransferWavesTxKind{} } +func (k *EthereumTransferWavesTxKind) Type() EthereumTransactionKindType { + return EthereumTransferWavesKindType +} + func (k *EthereumTransferWavesTxKind) DecodedData() *ethabi.DecodedCallData { return nil } @@ -115,6 +119,10 @@ func NewEthereumTransferAssetsErc20TxKind(decodedData ethabi.DecodedCallData, as return &EthereumTransferAssetsErc20TxKind{Asset: asset, decodedData: decodedData, Arguments: arguments} } +func (k *EthereumTransferAssetsErc20TxKind) Type() EthereumTransactionKindType { + return EthereumTransferAssetsKindType +} + func (k *EthereumTransferAssetsErc20TxKind) DecodedData() *ethabi.DecodedCallData { return &k.decodedData } @@ -131,6 +139,10 @@ func NewEthereumInvokeScriptTxKind(decodedData ethabi.DecodedCallData) *Ethereum return &EthereumInvokeScriptTxKind{decodedData: decodedData} } +func (k *EthereumInvokeScriptTxKind) Type() EthereumTransactionKindType { + return EthereumInvokeKindType +} + func (k *EthereumInvokeScriptTxKind) DecodedData() *ethabi.DecodedCallData { return &k.decodedData } diff --git a/pkg/proto/eth_tx_kind_resolver.go b/pkg/proto/eth_tx_kind_resolver.go index 827f0be9a..9168a678f 100644 --- a/pkg/proto/eth_tx_kind_resolver.go +++ b/pkg/proto/eth_tx_kind_resolver.go @@ -7,15 +7,17 @@ import ( 
"github.com/wavesplatform/gowaves/pkg/ride/ast" ) +type EthereumTransactionKindType byte + const ( - EthereumTransferWavesKind = iota + 1 - EthereumTransferAssetsKind - EthereumInvokeKind + EthereumTransferWavesKindType EthereumTransactionKindType = iota + 1 + EthereumTransferAssetsKindType + EthereumInvokeKindType ) -func GuessEthereumTransactionKind(data []byte) (int64, error) { +func GuessEthereumTransactionKindType(data []byte) (EthereumTransactionKindType, error) { if len(data) == 0 { - return EthereumTransferWavesKind, nil + return EthereumTransferWavesKindType, nil } selectorBytes := data @@ -28,10 +30,10 @@ func GuessEthereumTransactionKind(data []byte) (int64, error) { } if ethabi.IsERC20TransferSelector(selector) { - return EthereumTransferAssetsKind, nil + return EthereumTransferAssetsKindType, nil } - return EthereumInvokeKind, nil + return EthereumInvokeKindType, nil } type EthereumTransactionKindResolver interface { @@ -53,15 +55,15 @@ func NewEthereumTransactionKindResolver(resolver ethKindResolverState, scheme Sc } func (e *ethTxKindResolver) ResolveTxKind(ethTx *EthereumTransaction, isBlockRewardDistributionActivated bool) (EthereumTransactionKind, error) { - txKind, err := GuessEthereumTransactionKind(ethTx.Data()) + txKind, err := GuessEthereumTransactionKindType(ethTx.Data()) if err != nil { return nil, errors.Wrap(err, "failed to guess ethereum tx kind") } switch txKind { - case EthereumTransferWavesKind: + case EthereumTransferWavesKindType: return NewEthereumTransferWavesTxKind(), nil - case EthereumTransferAssetsKind: + case EthereumTransferAssetsKindType: db := ethabi.NewErc20MethodsMap() decodedData, err := db.ParseCallDataRide(ethTx.Data(), isBlockRewardDistributionActivated) if err != nil { @@ -81,7 +83,7 @@ func (e *ethTxKindResolver) ResolveTxKind(ethTx *EthereumTransaction, isBlockRew return nil, errors.Wrap(err, "failed to get erc20 arguments from decoded data") } return NewEthereumTransferAssetsErc20TxKind(*decodedData, *NewOptionalAssetFromDigest(assetInfo.ID), erc20Arguments), nil - case EthereumInvokeKind: + case EthereumInvokeKindType: scriptAddr, err := ethTx.WavesAddressTo(e.scheme) if err != nil { return nil, err diff --git a/pkg/ride/functions_proto.go b/pkg/ride/functions_proto.go index d7216a70c..a084e7360 100644 --- a/pkg/ride/functions_proto.go +++ b/pkg/ride/functions_proto.go @@ -865,6 +865,40 @@ func transferByID(env environment, args ...rideType) (rideType, error) { } return nil, errors.Wrap(err, "transferByID") } + switch t := tx.GetTypeInfo().Type; t { + case proto.TransferTransaction: + // ok, it's transfer tx + case proto.EthereumMetamaskTransaction: + ethTx, ok := tx.(*proto.EthereumTransaction) + if !ok { + return nil, errors.Errorf("transferByID: expected ethereum transaction, got (%T)", tx) + } + kindType, ktErr := proto.GuessEthereumTransactionKindType(ethTx.Data()) + if ktErr != nil { + return nil, errors.Wrap(err, "transferByID: failed to guess ethereum transaction kind type") + } + switch kindType { + case proto.EthereumTransferWavesKindType, proto.EthereumTransferAssetsKindType: + // ok, it's an ethereum transfer tx (waves or asset) + case proto.EthereumInvokeKindType: + return rideUnit{}, nil // it's not an ethereum transfer tx + default: + return rideUnit{}, errors.Errorf("transferByID: unreachable point reached in eth kind type switch") + } + case proto.GenesisTransaction, proto.PaymentTransaction, proto.IssueTransaction, + proto.ReissueTransaction, proto.BurnTransaction, proto.ExchangeTransaction, + proto.LeaseTransaction, 
proto.LeaseCancelTransaction, proto.CreateAliasTransaction, + proto.MassTransferTransaction, proto.DataTransaction, proto.SetScriptTransaction, + proto.SponsorshipTransaction, proto.SetAssetScriptTransaction, proto.InvokeScriptTransaction, + proto.UpdateAssetInfoTransaction, proto.InvokeExpressionTransaction: + // it's not a transfer transaction + return rideUnit{}, nil + default: + return rideUnit{}, errors.Errorf("transferByID: unreachable point reached in tx type switch") + } + if tx.GetTypeInfo().Type != proto.TransferTransaction { + return rideUnit{}, nil + } obj, err := transactionToObject(env, tx) if err != nil { return nil, errors.Wrap(err, "transferByID") From 7379737a377f2ac4f50d832b77a8526e8bccdd39 Mon Sep 17 00:00:00 2001 From: Nikolay Eskov Date: Thu, 14 Dec 2023 14:01:24 +0300 Subject: [PATCH 2/8] Add test and bugfix for 'transferTransactionByID'. (#1275) * Add test and bugfix for 'transferTransactionByID'. Continuation of #1274. * Remove unnecessary code in 'TestTransferByID'. --- pkg/ride/functions_proto.go | 3 - pkg/ride/functions_proto_test.go | 123 ++++++++++++++++++++++++++++++- 2 files changed, 122 insertions(+), 4 deletions(-) diff --git a/pkg/ride/functions_proto.go b/pkg/ride/functions_proto.go index a084e7360..122d86014 100644 --- a/pkg/ride/functions_proto.go +++ b/pkg/ride/functions_proto.go @@ -896,9 +896,6 @@ func transferByID(env environment, args ...rideType) (rideType, error) { default: return rideUnit{}, errors.Errorf("transferByID: unreachable point reached in tx type switch") } - if tx.GetTypeInfo().Type != proto.TransferTransaction { - return rideUnit{}, nil - } obj, err := transactionToObject(env, tx) if err != nil { return nil, errors.Wrap(err, "transferByID") diff --git a/pkg/ride/functions_proto_test.go b/pkg/ride/functions_proto_test.go index 1b3df4e35..b3290e4aa 100644 --- a/pkg/ride/functions_proto_test.go +++ b/pkg/ride/functions_proto_test.go @@ -4,6 +4,9 @@ import ( "bytes" "encoding/base64" "encoding/hex" + "math/big" + "strconv" + "strings" "testing" "time" @@ -15,8 +18,11 @@ import ( "github.com/wavesplatform/gowaves/pkg/crypto" "github.com/wavesplatform/gowaves/pkg/keyvalue" "github.com/wavesplatform/gowaves/pkg/proto" + "github.com/wavesplatform/gowaves/pkg/proto/ethabi" "github.com/wavesplatform/gowaves/pkg/ride/ast" + "github.com/wavesplatform/gowaves/pkg/ride/meta" "github.com/wavesplatform/gowaves/pkg/types" + "github.com/wavesplatform/gowaves/pkg/util/byte_helpers" ) var ( @@ -763,8 +769,123 @@ func TestBlockInfoByHeight(t *testing.T) { } } +func getPtr[T any](t T) *T { return &t } + func TestTransferByID(t *testing.T) { - t.SkipNow() + dApp1 := newTestAccount(t, "DAPP1") // 3MzDtgL5yw73C2xVLnLJCrT5gCL4357a4sz + sender := newTestAccount(t, "SENDER") // 3N8CkZAyS4XcDoJTJoKNuNk2xmNKmQj7myW + txID, err := crypto.NewDigestFromBase58("GemGCop1arCvTY447FLH8tDQF7knvzNCocNTHqKQBus9") + require.NoError(t, err) + assetID := txID + stubEthPK := new(proto.EthereumPublicKey) + ethTo := getPtr(proto.EthereumAddress(assetID[:proto.EthereumAddressSize])) + + erc20HexData := "0xa9059cbb0000000000000000000000009a1989946ae4249aac19ac7a038d24aab03c3d8c00000000000000000000000000000000000000000000000000001cc92ad60000" //nolint:lll + erc20Data, err := hex.DecodeString(strings.TrimPrefix(erc20HexData, "0x")) + require.NoError(t, err) + callData, err := ethabi.NewErc20MethodsMap().ParseCallDataRide(erc20Data, true) + require.NoError(t, err) + + rideFunctionMeta := meta.Function{ + Name: "call", + Arguments: []meta.Type{meta.String}, + } + callHexData := 
"0x3e08c22800000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000573616664730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" //nolint:lll + invokeData, err := hex.DecodeString(strings.TrimPrefix(callHexData, "0x")) + require.NoError(t, err) + mm, err := ethabi.NewMethodsMapFromRideDAppMeta(meta.DApp{Functions: []meta.Function{rideFunctionMeta}}) + require.NoError(t, err) + invokeCallData, err := mm.ParseCallDataRide(invokeData, true) + require.NoError(t, err) + + testCases := []struct { + tx proto.Transaction + unit bool + }{ + { + tx: byte_helpers.TransferWithProofs.Transaction.Clone(), + unit: false, + }, + { + tx: byte_helpers.TransferWithSig.Transaction.Clone(), + unit: false, + }, + { + tx: getPtr(proto.NewEthereumTransaction( + &proto.EthereumLegacyTx{To: ethTo, Value: big.NewInt(100500)}, + proto.NewEthereumTransferWavesTxKind(), + &txID, + stubEthPK, + 0, + )), + unit: false, + }, + { + tx: getPtr(proto.NewEthereumTransaction( + &proto.EthereumLegacyTx{ + To: ethTo, + Data: erc20Data, + }, + proto.NewEthereumTransferAssetsErc20TxKind( + *callData, + proto.NewOptionalAsset(true, assetID), + ethabi.ERC20TransferArguments{Recipient: sender.address().ID(), Amount: 100500}, + ), + &txID, + stubEthPK, + 0, + )), + unit: false, + }, + { + tx: getPtr(proto.NewEthereumTransaction( + &proto.EthereumLegacyTx{ + To: ethTo, + Data: invokeData, + }, + proto.NewEthereumInvokeScriptTxKind(*invokeCallData), + &txID, + stubEthPK, + 0, + )), + unit: true, + }, + { + tx: byte_helpers.InvokeScriptWithProofs.Transaction.Clone(), + unit: true, + }, + } + + for i, testCase := range testCases { + t.Run(strconv.Itoa(i+1), func(t *testing.T) { + env := newTestEnv(t).withLibVersion(ast.LibV5).withComplexityLimit(ast.LibV5, 26000). + withBlockV5Activated().withProtobufTx(). + withDataEntriesSizeV2().withMessageLengthV3(). + withValidateInternalPayments().withThis(dApp1). + withDApp(dApp1).withSender(sender). + withInvocation("call"). + withWavesBalance(dApp1, 1_00000000).withWavesBalance(sender, 1_00000000). + withTransaction(testCase.tx). + withAsset(&proto.FullAssetInfo{ + AssetInfo: proto.AssetInfo{ + AssetConstInfo: proto.AssetConstInfo{ + ID: txID, + }, + }, + }). + withWrappedState() + + txIDBytes := txID.Bytes() + res, tErr := transferByID(env.me, rideByteVector(txIDBytes)) + assert.NoError(t, tErr) + assert.NotNil(t, res) + if testCase.unit { + assert.Equal(t, rideUnit{}, res) + } else { + assert.NotEqual(t, rideUnit{}, res) + } + }) + } } func TestAddressToString(t *testing.T) { From abc99756f8fe1ea4fe9ad4775b8712b03e701699 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Dec 2023 10:56:47 +0400 Subject: [PATCH 3/8] Bump actions/upload-artifact from 3 to 4 (#1276) Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 3 to 4. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/go.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index 6971fa056..c25679254 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -92,7 +92,7 @@ jobs: run: make itest - name: Upload itest logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: itest_logs From 2bd26f57a9f034c137a98af010b1eab8e2f0324a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Dec 2023 11:48:08 +0400 Subject: [PATCH 4/8] Bump github/codeql-action from 2 to 3 (#1271) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 2 to 3. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v2...v3) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Alexey Kiselev --- .github/workflows/codeql-analysis.yml | 6 +++--- .github/workflows/security.yml | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index c1dfecf4f..dffc9e42d 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -43,7 +43,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -54,7 +54,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹī¸ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -68,4 +68,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml index 64c5459f9..e3bc4c46c 100644 --- a/.github/workflows/security.yml +++ b/.github/workflows/security.yml @@ -25,7 +25,7 @@ jobs: # with '-no-fail' we let the report trigger content trigger a failure using the GitHub Security features. args: "-no-fail -fmt sarif -out gosec.sarif ./..." - name: Upload SARIF file for GitHub Advanced Security Dashboard - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: gosec.sarif @@ -54,7 +54,7 @@ jobs: fi EOF - name: Upload SARIF file for GitHub Advanced Security Dashboard - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: semgrep.sarif From 3a8c8dcc25dc7c722ddc1d1c20e0e600c3ce633f Mon Sep 17 00:00:00 2001 From: Nikolay Eskov Date: Fri, 15 Dec 2023 12:25:21 +0300 Subject: [PATCH 5/8] Save block snapshot state hashes (#1269) * Created 'txSnapshotHasher'. 
* Add 'TestTxSnapshotHasher'. * Change 'txSnapshotHasher.CalculateHash' signature. Change 'TestTxSnapshotHasher'. * Increment 'StateVersion' constant. * Save block snapshot state hashes by block height. * Add '/go/debug/snapshotStateHash/{height}' HTTP API route for block snapshot state hashes. * Fixed the most part of tests in 'pkg/grpc/server' package. * Generate 'TransactionStatusSnapshot' and fixed rest of the tests in 'pkg/grpc/server' package. * Fix linter issues. * Fix: save block snapshot on its height. * Rename 'checkerInfo.height' to 'checkerInfo.blockchainHeight'. * Fixed 'TODO' in 'txSnapshotHasher.ApplyDataEntries'. * Remove unnecessary 'TODO' in 'txSnapshotHasher.CalculateHash'. * Add 'BenchmarkTxSnapshotHasher'. * Remove unnecessary assinment. * Rename 'performerInfo.height' to 'performerInfo.blockchainHeight'. * Refactored a bit 'transactionPerformer.performLease'. * Create 'performerInfo.blockHeight' method. * Use correct block height in 'transactionPerformer.performLease'. * Use correct block height in 'transactionPerformer.performLeaseCancel'. * Fix tests. * Removed unused 'proto.StateActionsCounter'. * Use 'stateActionsCounter' getter in 'blockSnapshotsApplier.ApplyNewAsset'. * Add 'txSnapshotHasher.Reset' method. Implemented 'txSnapshotHasher.Release'. Update 'BenchmarkTxSnapshotHasher'. * Implemented 'sort.Interface' for '[]hashEntry' slice. * Use 'bytebufferpool' for 'txSnapshotHasher'. * Reuse 'txSnapshotHasher' for transactions in the block. * Update 'TestTxSnapshotHasher' and 'BenchmarkTxSnapshotHasher'. * Move 'calculateTxSnapshotStateHash' func to 'snapshot_hasher.go' file. * Reuse fast hasher in 'txSnapshotHasher'. --- cmd/statehash/statehash.go | 8 +- pkg/api/node_api.go | 25 +- pkg/api/routes.go | 6 +- pkg/mock/state.go | 88 ++-- pkg/proto/common.go | 56 +++ pkg/proto/types.go | 82 +++- pkg/state/api.go | 3 +- pkg/state/appender.go | 134 ++++-- pkg/state/common_test.go | 11 +- pkg/state/constants.go | 2 +- pkg/state/ethereum_tx_test.go | 2 +- pkg/state/fee_validation_test.go | 4 +- pkg/state/history_storage.go | 10 +- pkg/state/invoke_applier.go | 7 +- pkg/state/invoke_applier_test.go | 9 +- pkg/state/keys.go | 50 +- pkg/state/script_caller.go | 2 +- pkg/state/snapshot_applier.go | 6 +- pkg/state/snapshot_generator.go | 11 +- pkg/state/snapshot_generator_internal_test.go | 34 +- pkg/state/snapshot_hasher.go | 454 ++++++++++++++++++ pkg/state/snapshot_hasher_internal_test.go | 238 +++++++++ pkg/state/state.go | 62 ++- pkg/state/state_hashes.go | 34 +- pkg/state/state_test.go | 8 +- pkg/state/threadsafe_wrapper.go | 10 +- pkg/state/transaction_checker.go | 10 +- pkg/state/transaction_checker_test.go | 39 +- pkg/state/transaction_differ_test.go | 13 +- pkg/state/transaction_performer.go | 45 +- pkg/state/transaction_performer_test.go | 70 +-- 31 files changed, 1294 insertions(+), 239 deletions(-) create mode 100644 pkg/state/snapshot_hasher.go create mode 100644 pkg/state/snapshot_hasher_internal_test.go diff --git a/cmd/statehash/statehash.go b/cmd/statehash/statehash.go index 48064b2d9..b90f6e2f7 100644 --- a/cmd/statehash/statehash.go +++ b/cmd/statehash/statehash.go @@ -135,7 +135,7 @@ func run() error { zap.S().Errorf("Failed to get remote state hash at height 1: %v", err) return err } - lsh, err := st.StateHashAtHeight(1) + lsh, err := st.LegacyStateHashAtHeight(1) if err != nil { zap.S().Errorf("Failed to get local state hash at 1: %v", err) return err @@ -156,7 +156,7 @@ func run() error { } height = h } - lsh, err := st.StateHashAtHeight(height) + lsh, 
err := st.LegacyStateHashAtHeight(height) if err != nil { zap.S().Errorf("Failed to get state hash at %d: %v", height, err) return err @@ -178,7 +178,7 @@ func run() error { return err } zap.S().Infof("State hashes are equal at height %d", h) - lsh, err = st.StateHashAtHeight(h + 1) + lsh, err = st.LegacyStateHashAtHeight(h + 1) if err != nil { zap.S().Errorf("Failed to get state hash at %d: %v", h+1, err) return err @@ -205,7 +205,7 @@ func findLastEqualStateHashes(c *client.Client, st state.State, stop uint64) (ui var start uint64 = 1 for start <= stop { middle := (start + stop) / 2 - lsh, err = st.StateHashAtHeight(middle) + lsh, err = st.LegacyStateHashAtHeight(middle) if err != nil { return middle, err } diff --git a/pkg/api/node_api.go b/pkg/api/node_api.go index dea5037b1..65bee60f6 100644 --- a/pkg/api/node_api.go +++ b/pkg/api/node_api.go @@ -764,7 +764,7 @@ func (a *NodeApi) nodeProcesses(w http.ResponseWriter, _ *http.Request) error { } func (a *NodeApi) stateHashDebug(height proto.Height) (*proto.StateHashDebug, error) { - stateHash, err := a.state.StateHashAtHeight(height) + stateHash, err := a.state.LegacyStateHashAtHeight(height) if err != nil { return nil, errors.Wrapf(err, "failed to get state hash at height %d", height) } @@ -805,6 +805,29 @@ func (a *NodeApi) stateHashLast(w http.ResponseWriter, _ *http.Request) error { return nil } +func (a *NodeApi) snapshotStateHash(w http.ResponseWriter, r *http.Request) error { + s := chi.URLParam(r, "height") + height, err := strconv.ParseUint(s, 10, 64) + if err != nil { + // TODO(nickeskov): which error it should send? + return &BadRequestError{err} + } + sh, err := a.state.SnapshotStateHashAtHeight(height) + if err != nil { + if state.IsNotFound(err) { + return apiErrs.BlockDoesNotExist + } + return errors.Wrapf(err, "failed to get snapshot state hash at height %d", height) + } + type out struct { + StateHash proto.HexBytes `json:"stateHash"` + } + if sendErr := trySendJson(w, out{StateHash: sh.Bytes()}); sendErr != nil { + return errors.Wrap(sendErr, "snapshotStateHash") + } + return nil +} + func wavesAddressInvalidCharErr(invalidChar rune, id string) *apiErrs.CustomValidationError { return apiErrs.NewCustomValidationError( fmt.Sprintf( diff --git a/pkg/api/routes.go b/pkg/api/routes.go index 0413cdf55..2d0a8cfa8 100644 --- a/pkg/api/routes.go +++ b/pkg/api/routes.go @@ -7,8 +7,9 @@ import ( "github.com/go-chi/chi/middleware" "github.com/pkg/errors" "github.com/semrush/zenrpc/v2" - "github.com/wavesplatform/gowaves/pkg/api/metamask" "go.uber.org/zap" + + "github.com/wavesplatform/gowaves/pkg/api/metamask" ) type HandleErrorFunc func(w http.ResponseWriter, r *http.Request, err error) @@ -88,6 +89,9 @@ func (a *NodeApi) routes(opts *RunOptions) (chi.Router, error) { rAuth.Post("/load", wrapper(WalletLoadKeys(a.app))) }) + r.Route("/debug", func(r chi.Router) { + r.Get("/snapshotStateHash/{height:\\d+}", wrapper(a.snapshotStateHash)) + }) r.Get("/miner/info", wrapper(a.GoMinerInfo)) r.Get("/node/processes", wrapper(a.nodeProcesses)) diff --git a/pkg/mock/state.go b/pkg/mock/state.go index d6ff78d57..8550b64fe 100644 --- a/pkg/mock/state.go +++ b/pkg/mock/state.go @@ -568,6 +568,21 @@ func (mr *MockStateInfoMockRecorder) IsAssetExist(assetID interface{}) *gomock.C return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsAssetExist", reflect.TypeOf((*MockStateInfo)(nil).IsAssetExist), assetID) } +// LegacyStateHashAtHeight mocks base method. 
+func (m *MockStateInfo) LegacyStateHashAtHeight(height proto.Height) (*proto.StateHash, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "LegacyStateHashAtHeight", height) + ret0, _ := ret[0].(*proto.StateHash) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// LegacyStateHashAtHeight indicates an expected call of LegacyStateHashAtHeight. +func (mr *MockStateInfoMockRecorder) LegacyStateHashAtHeight(height interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LegacyStateHashAtHeight", reflect.TypeOf((*MockStateInfo)(nil).LegacyStateHashAtHeight), height) +} + // MapR mocks base method. func (m *MockStateInfo) MapR(arg0 func(state.StateInfo) (interface{}, error)) (interface{}, error) { m.ctrl.T.Helper() @@ -868,34 +883,34 @@ func (mr *MockStateInfoMockRecorder) ShouldPersistAddressTransactions() *gomock. return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ShouldPersistAddressTransactions", reflect.TypeOf((*MockStateInfo)(nil).ShouldPersistAddressTransactions)) } -// SnapshotsAtHeight mocks base method. -func (m *MockStateInfo) SnapshotsAtHeight(height proto.Height) (proto.BlockSnapshot, error) { +// SnapshotStateHashAtHeight mocks base method. +func (m *MockStateInfo) SnapshotStateHashAtHeight(height proto.Height) (crypto.Digest, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SnapshotsAtHeight", height) - ret0, _ := ret[0].(proto.BlockSnapshot) + ret := m.ctrl.Call(m, "SnapshotStateHashAtHeight", height) + ret0, _ := ret[0].(crypto.Digest) ret1, _ := ret[1].(error) return ret0, ret1 } -// SnapshotsAtHeight indicates an expected call of SnapshotsAtHeight. -func (mr *MockStateInfoMockRecorder) SnapshotsAtHeight(height interface{}) *gomock.Call { +// SnapshotStateHashAtHeight indicates an expected call of SnapshotStateHashAtHeight. +func (mr *MockStateInfoMockRecorder) SnapshotStateHashAtHeight(height interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SnapshotsAtHeight", reflect.TypeOf((*MockStateInfo)(nil).SnapshotsAtHeight), height) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SnapshotStateHashAtHeight", reflect.TypeOf((*MockStateInfo)(nil).SnapshotStateHashAtHeight), height) } -// StateHashAtHeight mocks base method. -func (m *MockStateInfo) StateHashAtHeight(height uint64) (*proto.StateHash, error) { +// SnapshotsAtHeight mocks base method. +func (m *MockStateInfo) SnapshotsAtHeight(height proto.Height) (proto.BlockSnapshot, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "StateHashAtHeight", height) - ret0, _ := ret[0].(*proto.StateHash) + ret := m.ctrl.Call(m, "SnapshotsAtHeight", height) + ret0, _ := ret[0].(proto.BlockSnapshot) ret1, _ := ret[1].(error) return ret0, ret1 } -// StateHashAtHeight indicates an expected call of StateHashAtHeight. -func (mr *MockStateInfoMockRecorder) StateHashAtHeight(height interface{}) *gomock.Call { +// SnapshotsAtHeight indicates an expected call of SnapshotsAtHeight. +func (mr *MockStateInfoMockRecorder) SnapshotsAtHeight(height interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "StateHashAtHeight", reflect.TypeOf((*MockStateInfo)(nil).StateHashAtHeight), height) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SnapshotsAtHeight", reflect.TypeOf((*MockStateInfo)(nil).SnapshotsAtHeight), height) } // TopBlock mocks base method. 
@@ -1822,6 +1837,21 @@ func (mr *MockStateMockRecorder) IsAssetExist(assetID interface{}) *gomock.Call return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsAssetExist", reflect.TypeOf((*MockState)(nil).IsAssetExist), assetID) } +// LegacyStateHashAtHeight mocks base method. +func (m *MockState) LegacyStateHashAtHeight(height proto.Height) (*proto.StateHash, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "LegacyStateHashAtHeight", height) + ret0, _ := ret[0].(*proto.StateHash) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// LegacyStateHashAtHeight indicates an expected call of LegacyStateHashAtHeight. +func (mr *MockStateMockRecorder) LegacyStateHashAtHeight(height interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LegacyStateHashAtHeight", reflect.TypeOf((*MockState)(nil).LegacyStateHashAtHeight), height) +} + // Map mocks base method. func (m *MockState) Map(arg0 func(state.NonThreadSafeState) error) error { m.ctrl.T.Helper() @@ -2190,6 +2220,21 @@ func (mr *MockStateMockRecorder) ShouldPersistAddressTransactions() *gomock.Call return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ShouldPersistAddressTransactions", reflect.TypeOf((*MockState)(nil).ShouldPersistAddressTransactions)) } +// SnapshotStateHashAtHeight mocks base method. +func (m *MockState) SnapshotStateHashAtHeight(height proto.Height) (crypto.Digest, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SnapshotStateHashAtHeight", height) + ret0, _ := ret[0].(crypto.Digest) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SnapshotStateHashAtHeight indicates an expected call of SnapshotStateHashAtHeight. +func (mr *MockStateMockRecorder) SnapshotStateHashAtHeight(height interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SnapshotStateHashAtHeight", reflect.TypeOf((*MockState)(nil).SnapshotStateHashAtHeight), height) +} + // SnapshotsAtHeight mocks base method. func (m *MockState) SnapshotsAtHeight(height proto.Height) (proto.BlockSnapshot, error) { m.ctrl.T.Helper() @@ -2219,21 +2264,6 @@ func (mr *MockStateMockRecorder) StartProvidingExtendedApi() *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "StartProvidingExtendedApi", reflect.TypeOf((*MockState)(nil).StartProvidingExtendedApi)) } -// StateHashAtHeight mocks base method. -func (m *MockState) StateHashAtHeight(height uint64) (*proto.StateHash, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "StateHashAtHeight", height) - ret0, _ := ret[0].(*proto.StateHash) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// StateHashAtHeight indicates an expected call of StateHashAtHeight. -func (mr *MockStateMockRecorder) StateHashAtHeight(height interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "StateHashAtHeight", reflect.TypeOf((*MockState)(nil).StateHashAtHeight), height) -} - // TopBlock mocks base method. func (m *MockState) TopBlock() *proto.Block { m.ctrl.T.Helper() diff --git a/pkg/proto/common.go b/pkg/proto/common.go index 0062c137a..b8ab72b3b 100644 --- a/pkg/proto/common.go +++ b/pkg/proto/common.go @@ -4,6 +4,7 @@ import ( "encoding/binary" "encoding/hex" "fmt" + "io" "math" "strings" "time" @@ -13,6 +14,27 @@ import ( var ErrNotFound = errors.New("not found") +// WriteBool writes the bool to the writer. 
+func WriteBool(w io.Writer, b bool) error { + var c byte + if b { + c = 1 + } else { + c = 0 + } + return WriteByte(w, c) +} + +// WriteByte writes the byte to the writer. +func WriteByte(w io.Writer, c byte) error { + if bw, ok := w.(io.ByteWriter); ok { + return bw.WriteByte(c) + } + data := [...]byte{c} + _, err := w.Write(data[:]) + return err +} + // PutStringWithUInt8Len converts the string to slice of bytes. The first byte of resulting slice contains the length of the string. func PutStringWithUInt8Len(buf []byte, s string) { sl := uint8(len(s)) @@ -42,6 +64,23 @@ func PutStringWithUInt16Len(buf []byte, s string) { copy(buf[2:], s) } +// WriteStringWithUInt16Len writes given data with 2 bytes of its length. +func WriteStringWithUInt16Len(w io.Writer, data string) error { + l := len(data) + if l > math.MaxInt16 { + return errors.Errorf("invalid data length %d", l) + } + var size [uint16Size]byte + binary.BigEndian.PutUint16(size[:], uint16(l)) + if _, err := w.Write(size[:]); err != nil { + return errors.Wrap(err, "failed to write uin16 data size") + } + if _, err := io.WriteString(w, data); err != nil { + return errors.Wrap(err, "failed to write data") + } + return nil +} + // StringWithUInt16Len reads a string from the buffer `buf`. func StringWithUInt16Len(buf []byte) (string, error) { if l := len(buf); l < 2 { @@ -91,6 +130,23 @@ func PutBytesWithUInt16Len(buf []byte, data []byte) error { return nil } +// WriteBytesWithUInt16Len writes given data with 2 bytes of its length. +func WriteBytesWithUInt16Len(w io.Writer, data []byte) error { + l := len(data) + if l > math.MaxInt16 { + return errors.Errorf("invalid data length %d", l) + } + var size [uint16Size]byte + binary.BigEndian.PutUint16(size[:], uint16(l)) + if _, err := w.Write(size[:]); err != nil { + return errors.Wrap(err, "failed to write uin16 data size") + } + if _, err := w.Write(data); err != nil { + return errors.Wrap(err, "failed to write data") + } + return nil +} + // BytesWithUInt16Len reads from buf an array of bytes of length encoded in first 2 bytes. func BytesWithUInt16Len(buf []byte) ([]byte, error) { if l := len(buf); l < 2 { diff --git a/pkg/proto/types.go b/pkg/proto/types.go index 562cf6e3d..377866e7a 100644 --- a/pkg/proto/types.go +++ b/pkg/proto/types.go @@ -63,7 +63,9 @@ const ( MaxAssetScriptActionsV3 = 30 base64EncodingSizeLimit = 1024 base64EncodingPrefix = "base64:" + uint16Size = 2 uint32Size = 4 + uint64Size = 8 ) type Timestamp = uint64 @@ -2241,6 +2243,9 @@ type DataEntry interface { MarshalValue() ([]byte, error) UnmarshalValue([]byte) error + MarshaledValueSize() int + WriteValueTo(w io.Writer) error + MarshalBinary() ([]byte, error) UnmarshalBinary([]byte) error Valid(forbidEmptyKey, utf16KeyLen bool) error @@ -2328,9 +2333,13 @@ func (e IntegerDataEntry) PayloadSize() int { return len(e.Key) + 8 // 8 == sizeof(int64) } +func (e *IntegerDataEntry) MarshaledValueSize() int { + return 1 + uint64Size +} + // MarshalValue marshals the integer data entry value in its bytes representation. 
func (e IntegerDataEntry) MarshalValue() ([]byte, error) { - buf := make([]byte, 1+8) + buf := make([]byte, e.MarshaledValueSize()) pos := 0 buf[pos] = byte(DataInteger) pos++ @@ -2338,6 +2347,18 @@ func (e IntegerDataEntry) MarshalValue() ([]byte, error) { return buf, nil } +func (e *IntegerDataEntry) WriteValueTo(w io.Writer) error { + if err := WriteByte(w, byte(DataInteger)); err != nil { + return errors.Wrapf(err, "failed to write data entry type (%d)", DataInteger) + } + var value [uint64Size]byte + binary.BigEndian.PutUint64(value[:], uint64(e.Value)) + if _, err := w.Write(value[:]); err != nil { + return errors.Wrap(err, "failed to write integer data entry value") + } + return nil +} + // UnmarshalValue reads binary representation of integer data entry value to the structure. func (e *IntegerDataEntry) UnmarshalValue(data []byte) error { const minLen = 1 + 8 @@ -2459,9 +2480,13 @@ func (e BooleanDataEntry) PayloadSize() int { return len(e.Key) + 1 // 1 == sizeof(bool) } +func (e *BooleanDataEntry) MarshaledValueSize() int { + return 1 + 1 +} + // MarshalValue writes a byte representation of the boolean data entry value. func (e BooleanDataEntry) MarshalValue() ([]byte, error) { - buf := make([]byte, 1+1) + buf := make([]byte, e.MarshaledValueSize()) pos := 0 buf[pos] = byte(DataBoolean) pos++ @@ -2469,6 +2494,16 @@ func (e BooleanDataEntry) MarshalValue() ([]byte, error) { return buf, nil } +func (e *BooleanDataEntry) WriteValueTo(w io.Writer) error { + if err := WriteByte(w, byte(DataBoolean)); err != nil { + return errors.Wrapf(err, "failed to write data entry type (%d)", DataBoolean) + } + if err := WriteBool(w, e.Value); err != nil { + return errors.Wrap(err, "failed to write boolean data entry value") + } + return nil +} + // UnmarshalValue reads a byte representation of the data entry value. func (e *BooleanDataEntry) UnmarshalValue(data []byte) error { const minLen = 1 + 1 @@ -2597,10 +2632,14 @@ func (e BinaryDataEntry) PayloadSize() int { return len(e.Key) + len(e.Value) } +func (e *BinaryDataEntry) MarshaledValueSize() int { + return 1 + 2 + len(e.Value) +} + // MarshalValue writes an entry value to its byte representation. func (e BinaryDataEntry) MarshalValue() ([]byte, error) { pos := 0 - buf := make([]byte, 1+2+len(e.Value)) + buf := make([]byte, e.MarshaledValueSize()) buf[pos] = byte(DataBinary) pos++ if err := PutBytesWithUInt16Len(buf[pos:], e.Value); err != nil { @@ -2609,6 +2648,16 @@ func (e BinaryDataEntry) MarshalValue() ([]byte, error) { return buf, nil } +func (e *BinaryDataEntry) WriteValueTo(w io.Writer) error { + if err := WriteByte(w, byte(DataBinary)); err != nil { + return errors.Wrapf(err, "failed to write data entry type (%d)", DataBinary) + } + if err := WriteBytesWithUInt16Len(w, e.Value); err != nil { + return errors.Wrap(err, "failed to write binary data entry value") + } + return nil +} + // UnmarshalValue reads an entry value from a binary representation. func (e *BinaryDataEntry) UnmarshalValue(data []byte) error { const minLen = 1 + 2 @@ -2737,9 +2786,13 @@ func (e StringDataEntry) PayloadSize() int { return len(e.Key) + len(e.Value) } +func (e *StringDataEntry) MarshaledValueSize() int { + return 1 + 2 + len(e.Value) +} + // MarshalValue converts the data entry value to its byte representation. 
func (e StringDataEntry) MarshalValue() ([]byte, error) { - buf := make([]byte, 1+2+len(e.Value)) + buf := make([]byte, e.MarshaledValueSize()) pos := 0 buf[pos] = byte(DataString) pos++ @@ -2747,6 +2800,16 @@ func (e StringDataEntry) MarshalValue() ([]byte, error) { return buf, nil } +func (e *StringDataEntry) WriteValueTo(w io.Writer) error { + if err := WriteByte(w, byte(DataString)); err != nil { + return errors.Wrapf(err, "failed to write data entry type (%d)", DataString) + } + if err := WriteStringWithUInt16Len(w, e.Value); err != nil { + return errors.Wrap(err, "failed to write string data entry value") + } + return nil +} + // UnmarshalValue reads StringDataEntry value from bytes. func (e *StringDataEntry) UnmarshalValue(data []byte) error { const minLen = 1 + 2 @@ -2871,11 +2934,22 @@ func (e DeleteDataEntry) PayloadSize() int { return 0 // this entry doesn't have any payload } +func (e *DeleteDataEntry) MarshaledValueSize() int { + return 1 +} + // MarshalValue converts the data entry value to its byte representation. func (e DeleteDataEntry) MarshalValue() ([]byte, error) { return []byte{byte(DataDelete)}, nil } +func (e *DeleteDataEntry) WriteValueTo(w io.Writer) error { + if err := WriteByte(w, byte(DataDelete)); err != nil { + return errors.Wrapf(err, "failed to write data entry type (%d)", DataDelete) + } + return nil +} + // UnmarshalValue checks DeleteDataEntry value type is set. func (e *DeleteDataEntry) UnmarshalValue(data []byte) error { const minLen = 1 diff --git a/pkg/state/api.go b/pkg/state/api.go index 6a636a1b6..83a16ab34 100644 --- a/pkg/state/api.go +++ b/pkg/state/api.go @@ -120,7 +120,8 @@ type StateInfo interface { ProvidesStateHashes() (bool, error) // State hashes. - StateHashAtHeight(height uint64) (*proto.StateHash, error) + LegacyStateHashAtHeight(height proto.Height) (*proto.StateHash, error) + SnapshotStateHashAtHeight(height proto.Height) (crypto.Digest, error) // Map on readable state. Way to apply multiple operations under same lock. MapR(func(StateInfo) (interface{}, error)) (interface{}, error) diff --git a/pkg/state/appender.go b/pkg/state/appender.go index f8d0306cf..f0c2255a7 100644 --- a/pkg/state/appender.go +++ b/pkg/state/appender.go @@ -151,10 +151,11 @@ func (a *txAppender) checkDuplicateTxIds(tx proto.Transaction, recentIds map[str } type appendBlockParams struct { - transactions []proto.Transaction - chans *verifierChans - block, parent *proto.BlockHeader - height uint64 + transactions []proto.Transaction + chans *verifierChans + block, parent *proto.BlockHeader + blockchainHeight proto.Height + lastSnapshotStateHash crypto.Digest } func (a *txAppender) orderIsScripted(order proto.Order) (bool, error) { @@ -344,20 +345,29 @@ func (a *txAppender) commitTxApplication( } currentMinerAddress := proto.MustAddressFromPublicKey(a.settings.AddressSchemeCharacter, params.currentMinerPK) - var snapshot txSnapshot + var ( + snapshot txSnapshot + status *proto.TransactionStatusSnapshot + ) if applicationRes.status { // We only perform tx in case it has not failed. 
- performerInfo := &performerInfo{ - height: params.checkerInfo.height, - blockID: params.checkerInfo.blockID, - currentMinerAddress: currentMinerAddress, - checkerData: applicationRes.checkerData, - stateActionsCounter: params.stateActionsCounterInBlock, - } - snapshot, err = a.txHandler.performTx(tx, performerInfo, invocationRes, applicationRes.changes.diff) + pi := newPerformerInfo( + params.checkerInfo.blockchainHeight, + params.checkerInfo.blockID, + currentMinerAddress, + applicationRes.checkerData, + ) + snapshot, err = a.txHandler.performTx(tx, pi, invocationRes, applicationRes.changes.diff) if err != nil { return txSnapshot{}, wrapErr(TxCommitmentError, errors.Errorf("failed to perform: %v", err)) } + status = &proto.TransactionStatusSnapshot{ + Status: proto.TransactionSucceeded, + } + } else { + status = &proto.TransactionStatusSnapshot{ + Status: proto.TransactionFailed, + } } if params.validatingUtx { // Save transaction to in-mem storage. @@ -367,10 +377,13 @@ func (a *txAppender) commitTxApplication( ) } } else { + // TODO: snapshots for miner fee should be generated here, but not saved + // They must be saved in snapshot applier // Count tx fee. if err := a.blockDiffer.countMinerFee(tx); err != nil { return txSnapshot{}, wrapErr(TxCommitmentError, errors.Errorf("failed to count miner fee: %v", err)) } + // TODO: tx MUST be saved in snapshotApplier // Save transaction to storage. if err = a.rw.writeTransaction(tx, !applicationRes.status); err != nil { return txSnapshot{}, wrapErr(TxCommitmentError, @@ -378,7 +391,7 @@ func (a *txAppender) commitTxApplication( ) } } - // TODO: transaction status snapshot has to be appended here + snapshot.regular = append(snapshot.regular, status) return snapshot, nil } @@ -422,7 +435,6 @@ type appendTxParams struct { blockRewardDistributionActivated bool invokeExpressionActivated bool // TODO: check feature naming validatingUtx bool // if validatingUtx == false then chans MUST be initialized with non nil value - stateActionsCounterInBlock *proto.StateActionsCounter currentMinerPK crypto.PublicKey } @@ -479,7 +491,7 @@ func (a *txAppender) handleEthTx( if err != nil { return nil, nil, false, errors.Wrapf(err, "failed to handle ethereum transaction (type %s) with id %s, on height %d", - ethTx.TxKind.String(), ethTx.ID.String(), params.checkerInfo.height+1) + ethTx.TxKind.String(), ethTx.ID.String(), params.checkerInfo.blockchainHeight+1) } // In UTX balances are always validated. 
needToValidateBalanceDiff = params.validatingUtx @@ -493,7 +505,7 @@ func (a *txAppender) handleEthTx( if err != nil { return nil, nil, false, errors.Wrapf(err, "failed to handle ethereum invoke script transaction (type %s) with id %s, on height %d", - ethTx.TxKind.String(), ethTx.ID.String(), params.checkerInfo.height+1) + ethTx.TxKind.String(), ethTx.ID.String(), params.checkerInfo.blockchainHeight+1) } default: return nil, nil, false, errors.Errorf("Undefined ethereum transaction kind %T", ethTx.TxKind) @@ -643,6 +655,24 @@ func (a *txAppender) createInitialBlockSnapshot(minerAndRewardDiff txDiff) (txSn return snapshot, nil } +func calculateInitialSnapshotStateHash( + h *txSnapshotHasher, + blockHasParent bool, + blockHeight proto.Height, + prevHash crypto.Digest, + txSnapshot []proto.AtomicSnapshot, +) (crypto.Digest, error) { + if !blockHasParent { // processing genesis block + if len(txSnapshot) != 0 { // sanity check + return crypto.Digest{}, errors.New("initial block snapshot for genesis block must be empty") + } + return prevHash, nil // return initial state hash as is + } + // TODO: can initial txSnapshot be empty? (at least before NG activation) + var txID []byte // txID is necessary only for txStatus atomic snapshot; init snapshot can't have such message + return calculateTxSnapshotStateHash(h, txID, blockHeight, prevHash, txSnapshot) +} + func (a *txAppender) appendBlock(params *appendBlockParams) error { // Reset block complexity counter. defer func() { @@ -665,7 +695,7 @@ func (a *txAppender) appendBlock(params *appendBlockParams) error { currentTimestamp: params.block.Timestamp, blockID: params.block.BlockID(), blockVersion: params.block.Version, - height: params.height, + blockchainHeight: params.blockchainHeight, rideV5Activated: rideV5Activated, rideV6Activated: rideV6Activated, blockRewardDistribution: blockRewardDistribution, @@ -687,11 +717,38 @@ func (a *txAppender) appendBlock(params *appendBlockParams) error { return err } // create the initial snapshot - _, err = a.createInitialBlockSnapshot(minerAndRewardDiff) + initialSnapshot, err := a.createInitialBlockSnapshot(minerAndRewardDiff) if err != nil { return errors.Wrap(err, "failed to create initial snapshot") } + blockInfo, err := a.currentBlockInfo() + if err != nil { + return errors.Wrapf(err, "failed to get current block info, blockchain height is %d", params.blockchainHeight) + } + + currentBlockHeight := blockInfo.Height + + hasher, err := newTxSnapshotHasherDefault() + if err != nil { + return errors.Wrapf(err, "failed to create tx snapshot default hasher, block height is %d", currentBlockHeight) + } + defer hasher.Release() + + // get initial snapshot hash for block + stateHash, err := calculateInitialSnapshotStateHash( + hasher, + hasParent, + currentBlockHeight, + params.lastSnapshotStateHash, // previous block state hash + initialSnapshot.regular, + ) + if err != nil { + return errors.Wrapf(err, "failed to calculate initial snapshot hash for blockID %q at height %d", + params.block.BlockID(), currentBlockHeight, + ) + } + // TODO apply this snapshot when balances are refatored // err = initialSnapshot.Apply(&snapshotApplier) @@ -699,10 +756,6 @@ func (a *txAppender) appendBlock(params *appendBlockParams) error { if err = a.diffStor.saveTxDiff(minerAndRewardDiff); err != nil { return err } - blockInfo, err := a.currentBlockInfo() - if err != nil { - return err - } blockV5Activated, err := a.stor.features.newestIsActivated(int16(settings.BlockV5)) if err != nil { return err @@ -720,7 +773,7 @@ func (a 
*txAppender) appendBlock(params *appendBlockParams) error { return err } // Check and append transactions. - var blockSnapshots proto.BlockSnapshot + var bs proto.BlockSnapshot for _, tx := range params.transactions { appendTxArgs := &appendTxParams{ @@ -736,17 +789,37 @@ func (a *txAppender) appendBlock(params *appendBlockParams) error { blockRewardDistributionActivated: blockRewardDistributionActivated, invokeExpressionActivated: invokeExpressionActivated, validatingUtx: false, - stateActionsCounterInBlock: stateActionsCounterInBlockValidation, currentMinerPK: params.block.GeneratorPublicKey, } txSnapshots, errAppendTx := a.appendTx(tx, appendTxArgs) if errAppendTx != nil { return errAppendTx } - blockSnapshots.AppendTxSnapshot(txSnapshots.regular) + bs.AppendTxSnapshot(txSnapshots.regular) + + txID, idErr := tx.GetID(a.settings.AddressSchemeCharacter) + if idErr != nil { + return idErr + } + + if len(txSnapshots.regular) == 0 { // sanity check + return errors.Errorf("snapshot of txID %q cannot be empty", base58.Encode(txID)) + } + txSh, shErr := calculateTxSnapshotStateHash(hasher, txID, currentBlockHeight, stateHash, txSnapshots.regular) + if shErr != nil { + return errors.Wrapf(shErr, "failed to calculate tx snapshot hash for txID %q at height %d", + base58.Encode(txID), currentBlockHeight, + ) + } + stateHash = txSh // update stateHash in order to accumulate state hashes into block snapshot hash } - if err = a.stor.snapshots.saveSnapshots(params.block.BlockID(), params.height, blockSnapshots); err != nil { - return err + blockID := params.block.BlockID() + if ssErr := a.stor.snapshots.saveSnapshots(params.block.BlockID(), currentBlockHeight, bs); ssErr != nil { + return ssErr + } + // TODO: check snapshot hash with the block snapshot hash if it exists + if shErr := a.stor.stateHashes.saveSnapshotStateHash(stateHash, currentBlockHeight, blockID); shErr != nil { + return errors.Wrapf(shErr, "failed to save block shasnpt hash at height %d", currentBlockHeight) } // Save fee distribution of this block. // This will be needed for createMinerAndRewardDiff() of next block due to NG. 
@@ -963,7 +1036,7 @@ func (a *txAppender) validateNextTx(tx proto.Transaction, currentTimestamp, pare parentTimestamp: parentTimestamp, blockID: block.BlockID(), blockVersion: version, - height: blockInfo.Height, + blockchainHeight: blockInfo.Height, rideV5Activated: rideV5Activated, rideV6Activated: rideV6Activated, blockRewardDistribution: blockRewardDistribution, @@ -984,6 +1057,7 @@ func (a *txAppender) validateNextTx(tx proto.Transaction, currentTimestamp, pare if err != nil { return errs.Extend(err, "failed to check 'InvokeExpression' is activated") // TODO: check feature naming in err message } + // it's correct to use new counter because there's no block exists, but this field is necessary in tx performer issueCounterInBlock := new(proto.StateActionsCounter) snapshotApplierInfo := newBlockSnapshotsApplierInfo(checkerInfo, a.settings.AddressSchemeCharacter, issueCounterInBlock) @@ -1002,8 +1076,6 @@ func (a *txAppender) validateNextTx(tx proto.Transaction, currentTimestamp, pare blockRewardDistributionActivated: blockRewardDistributionActivated, invokeExpressionActivated: invokeExpressionActivated, validatingUtx: true, - // it's correct to use new counter because there's no block exists, but this field is necessary in tx performer - stateActionsCounterInBlock: issueCounterInBlock, } _, err = a.appendTx(tx, appendTxArgs) if err != nil { diff --git a/pkg/state/common_test.go b/pkg/state/common_test.go index 73f190596..f985a03c9 100644 --- a/pkg/state/common_test.go +++ b/pkg/state/common_test.go @@ -133,12 +133,11 @@ func defaultDifferInfo() *differInfo { func defaultAppendTxParams() *appendTxParams { return &appendTxParams{ - checkerInfo: defaultCheckerInfo(), - blockInfo: defaultBlockInfo(), - block: defaultBlock(), - acceptFailed: false, - validatingUtx: false, - stateActionsCounterInBlock: new(proto.StateActionsCounter), + checkerInfo: defaultCheckerInfo(), + blockInfo: defaultBlockInfo(), + block: defaultBlock(), + acceptFailed: false, + validatingUtx: false, } } diff --git a/pkg/state/constants.go b/pkg/state/constants.go index a33ea3020..1c8197fa3 100644 --- a/pkg/state/constants.go +++ b/pkg/state/constants.go @@ -25,7 +25,7 @@ const ( // StateVersion is current version of state internal storage formats. // It increases when backward compatibility with previous storage version is lost. - StateVersion = 20 + StateVersion = 21 // Memory limit for address transactions. flush() is called when this // limit is exceeded. 
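Note (editorial, not part of the patch): the appendBlock changes earlier in this commit fold every transaction's snapshot hash into a single per-block snapshot state hash, starting from the previous block's hash and updating it after each transaction. A minimal sketch of that chaining, assuming it lives in pkg/state next to the helpers this patch introduces (newTxSnapshotHasherDefault, calculateTxSnapshotStateHash); txWithSnapshot is a hypothetical pair used only for this illustration:

// txWithSnapshot is a hypothetical helper type for this sketch only:
// a transaction ID paired with the atomic snapshots it produced.
type txWithSnapshot struct {
	ID       []byte
	Snapshot []proto.AtomicSnapshot
}

// blockSnapshotStateHash folds per-transaction snapshot hashes into one digest.
// It starts from the previous block's snapshot state hash, so the result depends
// on transaction order, and the final value is what appendBlock stores via
// stateHashes.saveSnapshotStateHash for the current block height.
func blockSnapshotStateHash(
	prev crypto.Digest,
	height proto.Height,
	txs []txWithSnapshot,
) (crypto.Digest, error) {
	h, err := newTxSnapshotHasherDefault()
	if err != nil {
		return crypto.Digest{}, err
	}
	defer h.Release() // the hasher is pooled and reused for all transactions of the block
	sh := prev
	for _, t := range txs {
		// each step mixes the previous hash into the current transaction's snapshot hash
		sh, err = calculateTxSnapshotStateHash(h, t.ID, height, sh, t.Snapshot)
		if err != nil {
			return crypto.Digest{}, err
		}
	}
	return sh, nil
}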
diff --git a/pkg/state/ethereum_tx_test.go b/pkg/state/ethereum_tx_test.go index b8cdd4bb2..626a671c7 100644 --- a/pkg/state/ethereum_tx_test.go +++ b/pkg/state/ethereum_tx_test.go @@ -78,7 +78,7 @@ func defaultTxAppender(t *testing.T, storage scriptStorageState, state types.Sma newBlockSnapshotsApplierInfo( params.checkerInfo, blockchainSettings.AddressSchemeCharacter, - params.stateActionsCounterInBlock, + new(proto.StateActionsCounter), ), newSnapshotApplierStorages(stor.entities), ) diff --git a/pkg/state/fee_validation_test.go b/pkg/state/fee_validation_test.go index 8528d8fba..bd719b91c 100644 --- a/pkg/state/fee_validation_test.go +++ b/pkg/state/fee_validation_test.go @@ -65,7 +65,7 @@ func TestAccountHasVerifierAfterRollbackFilterFalse(t *testing.T) { address, err := proto.NewAddressFromPublicKey(to.tc.settings.AddressSchemeCharacter, tx.SenderPK) assert.NoError(t, err, "failed to receive an address from public key") - txPerformerInfo := defaultPerformerInfo(to.stateActionsCounter) + txPerformerInfo := defaultPerformerInfo() txPerformerInfo.blockID = blockID2 info.blockID = blockID2 // the block from checker info is used by snapshot applier to apply a tx txPerformerInfo.checkerData = checkerData @@ -103,7 +103,7 @@ func TestAccountDoesNotHaveScriptAfterRollbackFilterTrue(t *testing.T) { address, err := proto.NewAddressFromPublicKey(to.tc.settings.AddressSchemeCharacter, tx.SenderPK) assert.NoError(t, err, "failed to receive an address from public key") - txPerformerInfo := defaultPerformerInfo(to.stateActionsCounter) + txPerformerInfo := defaultPerformerInfo() txPerformerInfo.blockID = blockID2 info.blockID = blockID2 // the block from checker info is used by snapshot applier to apply a tx txPerformerInfo.checkerData.scriptEstimation = &scriptEstimation{} diff --git a/pkg/state/history_storage.go b/pkg/state/history_storage.go index cc26ee1d9..33ce154b8 100644 --- a/pkg/state/history_storage.go +++ b/pkg/state/history_storage.go @@ -38,7 +38,8 @@ const ( rewardChanges invokeResult score - stateHash + legacyStateHash + snapshotStateHash hitSource feeDistr snapshots @@ -168,7 +169,12 @@ var properties = map[blockchainEntity]blockchainEntityProperties{ needToCut: true, fixedSize: false, }, - stateHash: { + legacyStateHash: { + needToFilter: true, + needToCut: true, + fixedSize: false, + }, + snapshotStateHash: { needToFilter: true, needToCut: true, fixedSize: false, diff --git a/pkg/state/invoke_applier.go b/pkg/state/invoke_applier.go index abf1e7286..fd15663ed 100644 --- a/pkg/state/invoke_applier.go +++ b/pkg/state/invoke_applier.go @@ -344,7 +344,7 @@ func (ia *invokeApplier) fallibleValidation(tx proto.Transaction, info *addlInvo Scheme: ia.settings.AddressSchemeCharacter, ScriptAddress: info.scriptAddr, } - validatePayments := info.checkerInfo.height > ia.settings.InternalInvokePaymentsValidationAfterHeight + validatePayments := info.checkerInfo.blockchainHeight > ia.settings.InternalInvokePaymentsValidationAfterHeight if err := proto.ValidateActions(info.actions, restrictions, info.rideV6Activated, info.libVersion, validatePayments); err != nil { return proto.DAppError, info.failedChanges, err } @@ -775,9 +775,10 @@ func (ia *invokeApplier) handleInvokeFunctionError( return invocationRes, applicationRes, err case ride.InternalInvocationError: + blockchainHeight := info.checkerInfo.blockchainHeight // Special script error produced by internal script invocation or application of results. 
// Reject transaction after certain height - rejectOnInvocationError := info.checkerInfo.height >= ia.settings.InternalInvokeCorrectFailRejectBehaviourAfterHeight + rejectOnInvocationError := blockchainHeight >= ia.settings.InternalInvokeCorrectFailRejectBehaviourAfterHeight if !info.acceptFailed || rejectOnInvocationError || isCheap { return nil, nil, errors.Wrapf( err, "transaction rejected with spent complexity %d and following call stack:\n%s", @@ -921,7 +922,7 @@ func (ia *invokeApplier) handleFallibleValidationError(err error, // If fallibleValidation fails, we should save transaction to blockchain when acceptFailed is true. if !info.acceptFailed || (ia.sc.recentTxComplexity <= FailFreeInvokeComplexity && - info.checkerInfo.height >= ia.settings.InternalInvokeCorrectFailRejectBehaviourAfterHeight) { + info.checkerInfo.blockchainHeight >= ia.settings.InternalInvokeCorrectFailRejectBehaviourAfterHeight) { return nil, err } invocationRes = &invocationResult{ diff --git a/pkg/state/invoke_applier_test.go b/pkg/state/invoke_applier_test.go index 622c9a1b3..730e6e7eb 100644 --- a/pkg/state/invoke_applier_test.go +++ b/pkg/state/invoke_applier_test.go @@ -686,7 +686,8 @@ func TestFailedApplyInvokeScript(t *testing.T) { info := to.fallibleValidationParams(t) info.acceptFailed = true info.blockV5Activated = true - info.checkerInfo.height = 3_000_000 // We have to move height forward here because MainNet settings are used and height must be more than 2792473 + // We have to move height forward here because MainNet settings are used and height must be more than 2792473 + info.checkerInfo.blockchainHeight = 3_000_000 to.setDApp(t, "ride4_asset.base64", testGlobal.recipientInfo) to.setAndCheckInitialWavesBalance(t, testGlobal.senderInfo.addr, invokeFee*3) @@ -756,7 +757,7 @@ func TestFailedInvokeApplicationComplexity(t *testing.T) { infoAfter.acceptFailed = true infoAfter.blockV5Activated = true infoAfter.rideV5Activated = true - infoAfter.checkerInfo.height = 2_800_000 + infoAfter.checkerInfo.blockchainHeight = 2_800_000 to.setDApp(t, "ride5_recursive_invoke.base64", testGlobal.recipientInfo) @@ -841,7 +842,7 @@ func TestFailedInvokeApplicationComplexityAfterRideV6(t *testing.T) { info.acceptFailed = true info.blockV5Activated = true info.rideV5Activated = true - info.checkerInfo.height = 2_800_000 + info.checkerInfo.blockchainHeight = 2_800_000 info.rideV6Activated = true to.setDApp(t, "ride5_recursive_invoke.base64", testGlobal.recipientInfo) @@ -1101,7 +1102,7 @@ func TestFailRejectOnThrow(t *testing.T) { info.acceptFailed = true info.blockV5Activated = true info.rideV5Activated = true - info.checkerInfo.height = 2_800_000 + info.checkerInfo.blockchainHeight = 2_800_000 to.setDApp(t, "ride5_fail_on_throw.base64", testGlobal.recipientInfo) to.setAndCheckInitialWavesBalance(t, testGlobal.senderInfo.addr, invokeFee*3) diff --git a/pkg/state/keys.go b/pkg/state/keys.go index 97ea22f38..e2eda7a75 100644 --- a/pkg/state/keys.go +++ b/pkg/state/keys.go @@ -16,16 +16,18 @@ const ( // Key sizes. 
minAccountsDataStorKeySize = 1 + 8 + 2 + 1 - wavesBalanceKeySize = 1 + proto.AddressIDSize - assetBalanceKeySize = 1 + proto.AddressIDSize + proto.AssetIDSize - leaseKeySize = 1 + crypto.DigestSize - aliasKeySize = 1 + 2 + proto.AliasMaxLength - addressToAliasesKeySize = 1 + proto.AddressIDSize - disabledAliasKeySize = 1 + 2 + proto.AliasMaxLength - approvedFeaturesKeySize = 1 + 2 - votesFeaturesKeySize = 1 + 2 - invokeResultKeySize = 1 + crypto.DigestSize - snapshotKeySize = 1 + 8 + wavesBalanceKeySize = 1 + proto.AddressIDSize + assetBalanceKeySize = 1 + proto.AddressIDSize + proto.AssetIDSize + leaseKeySize = 1 + crypto.DigestSize + aliasKeySize = 1 + 2 + proto.AliasMaxLength + addressToAliasesKeySize = 1 + proto.AddressIDSize + disabledAliasKeySize = 1 + 2 + proto.AliasMaxLength + approvedFeaturesKeySize = 1 + 2 + votesFeaturesKeySize = 1 + 2 + invokeResultKeySize = 1 + crypto.DigestSize + legacyStateHashKeySize = 1 + 8 + snapshotStateHashKeySize = 1 + 8 + snapshotKeySize = 1 + 8 ) // Primary prefixes for storage keys @@ -120,7 +122,8 @@ const ( rwProtobufInfoKeyPrefix // Stores state hashes at height. - stateHashKeyPrefix + legacyStateHashKeyPrefix + snapshotStateHashKeyPrefix // Hit source data. hitSourceKeyPrefix @@ -175,8 +178,10 @@ func prefixByEntity(entity blockchainEntity) ([]byte, error) { return []byte{invokeResultKeyPrefix}, nil case score: return []byte{scoreKeyPrefix}, nil - case stateHash: - return []byte{stateHashKeyPrefix}, nil + case legacyStateHash: + return []byte{legacyStateHashKeyPrefix}, nil + case snapshotStateHash: + return []byte{snapshotStateHashKeyPrefix}, nil case hitSource: return []byte{hitSourceKeyPrefix}, nil case feeDistr: @@ -669,13 +674,24 @@ func (k *invokeResultKey) bytes() []byte { return res } -type stateHashKey struct { +type legacyStateHashKey struct { height uint64 } -func (k *stateHashKey) bytes() []byte { - buf := make([]byte, 9) - buf[0] = stateHashKeyPrefix +func (k *legacyStateHashKey) bytes() []byte { + buf := make([]byte, legacyStateHashKeySize) + buf[0] = legacyStateHashKeyPrefix + binary.BigEndian.PutUint64(buf[1:], k.height) + return buf +} + +type snapshotStateHashKey struct { + height uint64 +} + +func (k *snapshotStateHashKey) bytes() []byte { + buf := make([]byte, snapshotStateHashKeySize) + buf[0] = snapshotStateHashKeyPrefix binary.BigEndian.PutUint64(buf[1:], k.height) return buf } diff --git a/pkg/state/script_caller.go b/pkg/state/script_caller.go index 6494827e2..dd81cc36b 100644 --- a/pkg/state/script_caller.go +++ b/pkg/state/script_caller.go @@ -389,7 +389,7 @@ func (a *scriptCaller) invokeFunctionByEthereumTx( abiPayments := tx.TxKind.DecodedData().Payments scriptPayments := make([]proto.ScriptPayment, 0, len(abiPayments)) for _, p := range abiPayments { - if p.Amount <= 0 && info.checkerInfo.height > a.settings.InvokeNoZeroPaymentsAfterHeight { + if p.Amount <= 0 && info.checkerInfo.blockchainHeight > a.settings.InvokeNoZeroPaymentsAfterHeight { return nil, proto.FunctionCall{}, errors.Errorf("invalid payment amount '%d'", p.Amount) } optAsset := proto.NewOptionalAsset(p.PresentAssetID, p.AssetID) diff --git a/pkg/state/snapshot_applier.go b/pkg/state/snapshot_applier.go index 076393560..2ac175834 100644 --- a/pkg/state/snapshot_applier.go +++ b/pkg/state/snapshot_applier.go @@ -120,7 +120,7 @@ func (s blockSnapshotsApplierInfo) BlockID() proto.BlockID { } func (s blockSnapshotsApplierInfo) BlockchainHeight() proto.Height { - return s.ci.height + return s.ci.blockchainHeight } func (s blockSnapshotsApplierInfo) 
CurrentBlockHeight() proto.Height { @@ -195,7 +195,7 @@ func (a *blockSnapshotsApplier) ApplyNewAsset(snapshot proto.NewAssetSnapshot) e issuer: snapshot.IssuerPublicKey, decimals: snapshot.Decimals, issueHeight: height, - issueSequenceInBlock: a.info.stateActionsCounter.NextIssueActionNumber(), + issueSequenceInBlock: a.info.StateActionsCounter().NextIssueActionNumber(), }, assetChangeableInfo: assetChangeableInfo{}, } @@ -316,7 +316,7 @@ func (a *blockSnapshotsApplier) ApplyCancelledLease(snapshot proto.CancelledLeas } func (a *blockSnapshotsApplier) ApplyTransactionsStatus(_ proto.TransactionStatusSnapshot) error { - return nil // no-op + return nil // TODO: tx MUST be saved here } func (a *blockSnapshotsApplier) ApplyDAppComplexity(snapshot InternalDAppComplexitySnapshot) error { diff --git a/pkg/state/snapshot_generator.go b/pkg/state/snapshot_generator.go index e9a1b8667..e31c0fe81 100644 --- a/pkg/state/snapshot_generator.go +++ b/pkg/state/snapshot_generator.go @@ -200,8 +200,11 @@ func (sg *snapshotGenerator) generateSnapshotForExchangeTx(sellOrder proto.Order return snapshot, nil } -func (sg *snapshotGenerator) generateSnapshotForLeaseTx(lease *leasing, leaseID crypto.Digest, - originalTxID *crypto.Digest, balanceChanges txDiff) (txSnapshot, error) { +func (sg *snapshotGenerator) generateSnapshotForLeaseTx( + lease *leasing, + leaseID crypto.Digest, + balanceChanges txDiff, +) (txSnapshot, error) { var err error snapshot, err := sg.generateBalancesSnapshot(balanceChanges) if err != nil { @@ -217,7 +220,7 @@ func (sg *snapshotGenerator) generateSnapshotForLeaseTx(lease *leasing, leaseID leaseStatusActiveSnapshot := &InternalNewLeaseInfoSnapshot{ LeaseID: leaseID, OriginHeight: lease.OriginHeight, - OriginTransactionID: originalTxID, + OriginTransactionID: lease.OriginTransactionID, } snapshot.regular = append(snapshot.regular, leaseStatusSnapshot) snapshot.internal = append(snapshot.internal, leaseStatusActiveSnapshot) @@ -227,7 +230,7 @@ func (sg *snapshotGenerator) generateSnapshotForLeaseTx(lease *leasing, leaseID func (sg *snapshotGenerator) generateSnapshotForLeaseCancelTx( txID *crypto.Digest, leaseID crypto.Digest, - cancelHeight uint64, + cancelHeight proto.Height, balanceChanges txDiff, ) (txSnapshot, error) { var err error diff --git a/pkg/state/snapshot_generator_internal_test.go b/pkg/state/snapshot_generator_internal_test.go index 0a885645f..50a39a943 100644 --- a/pkg/state/snapshot_generator_internal_test.go +++ b/pkg/state/snapshot_generator_internal_test.go @@ -36,7 +36,7 @@ func defaultAssetInfoTransfer(tail [12]byte, reissuable bool, } func defaultPerformerInfoWithChecker(checkerData txCheckerData) *performerInfo { - return &performerInfo{0, blockID0, proto.WavesAddress{}, new(proto.StateActionsCounter), checkerData} + return &performerInfo{0, blockID0, proto.WavesAddress{}, checkerData} } func customCheckerInfo() *checkerInfo { @@ -46,14 +46,14 @@ func customCheckerInfo() *checkerInfo { parentTimestamp: defaultTimestamp - settings.MainNetSettings.MaxTxTimeBackOffset/2, blockID: blockID0, blockVersion: defaultBlockInfo.Version, - height: defaultBlockInfo.Height, + blockchainHeight: defaultBlockInfo.Height, } } func createCheckerCustomTestObjects(t *testing.T, differ *differTestObjects) *checkerTestObjects { tc, err := newTransactionChecker(proto.NewBlockIDFromSignature(genSig), differ.stor.entities, settings.MainNetSettings) require.NoError(t, err, "newTransactionChecker() failed") - return &checkerTestObjects{differ.stor, tc, differ.tp, 
differ.stateActionsCounter} + return &checkerTestObjects{differ.stor, tc, differ.tp} } func txSnapshotsEqual(t *testing.T, expected, actual txSnapshot) { @@ -82,7 +82,7 @@ func TestDefaultTransferWavesAndAssetSnapshot(t *testing.T) { assert.NoError(t, err, "createDiffTransferWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} transactionSnapshot, err := to.tp.performTransferWithSig(tx, - defaultPerformerInfo(to.stateActionsCounter), nil, applicationRes.changes.diff) + defaultPerformerInfo(), nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform transfer tx") expectedSnapshot := txSnapshot{ regular: []proto.AtomicSnapshot{ @@ -127,7 +127,7 @@ func TestDefaultIssueTransactionSnapshot(t *testing.T) { assert.NoError(t, err, "createDiffIssueWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} transactionSnapshot, err := to.tp.performIssueWithSig(tx, - defaultPerformerInfo(to.stateActionsCounter), nil, applicationRes.changes.diff) + defaultPerformerInfo(), nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform issue tx") expectedSnapshot := txSnapshot{ @@ -195,7 +195,7 @@ func TestDefaultReissueSnapshot(t *testing.T) { assert.NoError(t, err, "createDiffReissueWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} transactionSnapshot, err := to.tp.performReissueWithSig(tx, - defaultPerformerInfo(to.stateActionsCounter), nil, applicationRes.changes.diff) + defaultPerformerInfo(), nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform reissue tx") expectedSnapshot := txSnapshot{ @@ -252,7 +252,7 @@ func TestDefaultBurnSnapshot(t *testing.T) { assert.NoError(t, err, "createDiffBurnWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} transactionSnapshot, err := to.tp.performBurnWithSig(tx, - defaultPerformerInfo(to.stateActionsCounter), nil, applicationRes.changes.diff) + defaultPerformerInfo(), nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform burn tx") expectedSnapshot := txSnapshot{ @@ -337,7 +337,7 @@ func TestDefaultExchangeTransaction(t *testing.T) { ch, err := to.td.createDiffExchange(tx, defaultDifferInfo()) assert.NoError(t, err, "createDiffBurnWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} - transactionSnapshot, err := to.tp.performExchange(tx, defaultPerformerInfo(to.stateActionsCounter), + transactionSnapshot, err := to.tp.performExchange(tx, defaultPerformerInfo(), nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform burn tx") @@ -415,8 +415,8 @@ func TestDefaultLeaseSnapshot(t *testing.T) { ch, err := to.td.createDiffLeaseWithSig(tx, defaultDifferInfo()) assert.NoError(t, err, "createDiffBurnWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} - transactionSnapshot, err := to.tp.performLeaseWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), - nil, applicationRes.changes.diff) + pi := defaultPerformerInfo() + transactionSnapshot, err := to.tp.performLeaseWithSig(tx, pi, nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform burn tx") expectedSnapshot := txSnapshot{ @@ -449,7 +449,7 @@ func TestDefaultLeaseSnapshot(t *testing.T) { internal: []internalSnapshot{ &InternalNewLeaseInfoSnapshot{ LeaseID: *tx.ID, - OriginHeight: 0, + OriginHeight: pi.blockHeight(), 
OriginTransactionID: tx.ID, }, }, @@ -493,8 +493,8 @@ func TestDefaultLeaseCancelSnapshot(t *testing.T) { ch, err := to.td.createDiffLeaseCancelWithSig(tx, defaultDifferInfo()) assert.NoError(t, err, "createDiffBurnWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} - transactionSnapshot, err := to.tp.performLeaseCancelWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), - nil, applicationRes.changes.diff) + pi := defaultPerformerInfo() + transactionSnapshot, err := to.tp.performLeaseCancelWithSig(tx, pi, nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform burn tx") expectedSnapshot := txSnapshot{ @@ -524,7 +524,7 @@ func TestDefaultLeaseCancelSnapshot(t *testing.T) { internal: []internalSnapshot{ &InternalCancelledLeaseInfoSnapshot{ LeaseID: leaseID, - CancelHeight: 0, + CancelHeight: pi.blockHeight(), CancelTransactionID: tx.ID, }, }, @@ -551,7 +551,7 @@ func TestDefaultCreateAliasSnapshot(t *testing.T) { ch, err := to.td.createDiffCreateAliasWithSig(tx, defaultDifferInfo()) assert.NoError(t, err, "createDiffBurnWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} - transactionSnapshot, err := to.tp.performCreateAliasWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), + transactionSnapshot, err := to.tp.performCreateAliasWithSig(tx, defaultPerformerInfo(), nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform burn tx") @@ -602,7 +602,7 @@ func TestDefaultDataSnapshot(t *testing.T) { ch, err := to.td.createDiffDataWithProofs(tx, defaultDifferInfo()) assert.NoError(t, err, "createDiffBurnWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} - transactionSnapshot, err := to.tp.performDataWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), + transactionSnapshot, err := to.tp.performDataWithProofs(tx, defaultPerformerInfo(), nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform burn tx") @@ -648,7 +648,7 @@ func TestDefaultSponsorshipSnapshot(t *testing.T) { assert.NoError(t, err, "createDiffBurnWithSig() failed") applicationRes := &applicationResult{changes: ch, checkerData: txCheckerData{}} transactionSnapshot, err := to.tp.performSponsorshipWithProofs(tx, - defaultPerformerInfo(to.stateActionsCounter), nil, applicationRes.changes.diff) + defaultPerformerInfo(), nil, applicationRes.changes.diff) assert.NoError(t, err, "failed to perform burn tx") expectedSnapshot := txSnapshot{ diff --git a/pkg/state/snapshot_hasher.go b/pkg/state/snapshot_hasher.go new file mode 100644 index 000000000..d0bb1b203 --- /dev/null +++ b/pkg/state/snapshot_hasher.go @@ -0,0 +1,454 @@ +package state + +import ( + "bytes" + "encoding/binary" + "hash" + "sort" + + "github.com/pkg/errors" + "github.com/valyala/bytebufferpool" + + "github.com/wavesplatform/gowaves/pkg/crypto" + "github.com/wavesplatform/gowaves/pkg/proto" +) + +const ( + uint32Size = 4 // must be equal sizeof(uint32) + uint64Size = 8 // must be equal sizeof(uint64) +) + +type hashEntry struct { + _ struct{} + data *bytebufferpool.ByteBuffer +} + +func (e *hashEntry) Release() { + if e.data != nil { + bytebufferpool.Put(e.data) + e.data = nil + } +} + +type hashEntries []hashEntry + +func (h hashEntries) Len() int { return len(h) } + +func (h hashEntries) Less(i, j int) bool { return bytes.Compare(h[i].data.B, h[j].data.B) == -1 } + +func (h hashEntries) Swap(i, j int) { h[i], h[j] = h[j], h[i] } + +type txSnapshotHasher struct { 
+ fastHasher hash.Hash + hashEntries hashEntries + blockHeight proto.Height + transactionID []byte +} + +var _ = proto.SnapshotApplier((*txSnapshotHasher)(nil)) // use the same interface for applying and hashing + +func newTxSnapshotHasherDefault() (*txSnapshotHasher, error) { + return newTxSnapshotHasher(0, nil) +} + +func newTxSnapshotHasher(blockHeight proto.Height, transactionID []byte) (*txSnapshotHasher, error) { + fastHasher, err := crypto.NewFastHash() + if err != nil { + return nil, err + } + return &txSnapshotHasher{ + fastHasher: fastHasher, + hashEntries: nil, + blockHeight: blockHeight, + transactionID: transactionID, + }, nil +} + +func calculateTxSnapshotStateHash( + h *txSnapshotHasher, + txID []byte, + blockHeight proto.Height, + prevHash crypto.Digest, + txSnapshot []proto.AtomicSnapshot, +) (crypto.Digest, error) { + h.Reset(blockHeight, txID) // reset hasher before using + + for i, snapshot := range txSnapshot { + if err := snapshot.Apply(h); err != nil { + return crypto.Digest{}, errors.Wrapf(err, "failed to apply to hasher %d-th snapshot (%T)", + i+1, snapshot, + ) + } + } + return h.CalculateHash(prevHash) +} + +func writeUint32BigEndian(w *bytebufferpool.ByteBuffer, v uint32) error { + var buf [uint32Size]byte + binary.BigEndian.PutUint32(buf[:], v) + _, err := w.Write(buf[:]) + return err +} + +func writeUint64BigEndian(w *bytebufferpool.ByteBuffer, v uint64) error { + var buf [uint64Size]byte + binary.BigEndian.PutUint64(buf[:], v) + _, err := w.Write(buf[:]) + return err +} + +func writeBool(w *bytebufferpool.ByteBuffer, v bool) error { + var b byte + if v { + b = 1 + } + return w.WriteByte(b) +} + +// Release releases the hasher and sets its state to default. +func (h *txSnapshotHasher) Release() { + for _, e := range h.hashEntries { + e.Release() + } + h.hashEntries = h.hashEntries[:0] + h.blockHeight = 0 + h.transactionID = nil + h.fastHasher.Reset() +} + +// Reset releases the hasher and sets a new state. 
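+// A single hasher instance can therefore be reused for consecutive transactions:
+// calculateTxSnapshotStateHash calls Reset with the next transaction's ID and block height
+// before applying that transaction's snapshots to the hasher.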
+func (h *txSnapshotHasher) Reset(blockHeight proto.Height, transactionID []byte) {
+    h.Release()
+    h.blockHeight = blockHeight
+    h.transactionID = transactionID
+}
+
+func (h *txSnapshotHasher) CalculateHash(prevHash crypto.Digest) (crypto.Digest, error) {
+    defer h.fastHasher.Reset() // reset saved hasher
+    // scala node uses a stable sort, though stability is unnecessary here because:
+    // - every byte sequence is unique for each snapshot
+    // - if two byte sequences are equal then they are indistinguishable and order doesn't matter
+    sort.Sort(h.hashEntries)
+
+    for i, entry := range h.hashEntries {
+        if _, err := h.fastHasher.Write(entry.data.Bytes()); err != nil {
+            return crypto.Digest{}, errors.Wrapf(err, "failed to write to hasher %d-th hash entry", i)
+        }
+    }
+    var txSnapshotsDigest crypto.Digest
+    h.fastHasher.Sum(txSnapshotsDigest[:0])
+
+    h.fastHasher.Reset() // reuse the same hasher
+    if _, err := h.fastHasher.Write(prevHash[:]); err != nil {
+        return crypto.Digest{}, errors.Wrapf(err, "failed to write to hasher previous tx state snapshot hash")
+    }
+    if _, err := h.fastHasher.Write(txSnapshotsDigest[:]); err != nil {
+        return crypto.Digest{}, errors.Wrapf(err, "failed to write to hasher current tx snapshots hash")
+    }
+    var newHash crypto.Digest
+    h.fastHasher.Sum(newHash[:0])
+
+    return newHash, nil
+}
+
+func (h *txSnapshotHasher) ApplyWavesBalance(snapshot proto.WavesBalanceSnapshot) error {
+    buf := bytebufferpool.Get()
+
+    // Waves balances: address || balance
+    if _, err := buf.Write(snapshot.Address[:]); err != nil {
+        return err
+    }
+    if err := writeUint64BigEndian(buf, snapshot.Balance); err != nil {
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplyLeaseBalance(snapshot proto.LeaseBalanceSnapshot) error {
+    buf := bytebufferpool.Get()
+
+    // Lease balance: address || lease_in || lease_out
+    if _, err := buf.Write(snapshot.Address[:]); err != nil {
+        return err
+    }
+    if err := writeUint64BigEndian(buf, snapshot.LeaseIn); err != nil {
+        return err
+    }
+    if err := writeUint64BigEndian(buf, snapshot.LeaseOut); err != nil {
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplyAssetBalance(snapshot proto.AssetBalanceSnapshot) error {
+    buf := bytebufferpool.Get()
+
+    // Asset balances: address || asset_id || balance
+    if _, err := buf.Write(snapshot.Address[:]); err != nil {
+        return err
+    }
+    if _, err := buf.Write(snapshot.AssetID[:]); err != nil {
+        return err
+    }
+    if err := writeUint64BigEndian(buf, snapshot.Balance); err != nil {
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplyAlias(snapshot proto.AliasSnapshot) error {
+    buf := bytebufferpool.Get()
+
+    // Alias: address || alias
+    if _, err := buf.Write(snapshot.Address[:]); err != nil {
+        return err
+    }
+    if _, err := buf.WriteString(snapshot.Alias.Alias); err != nil { // we assume that string is valid UTF-8
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplyNewAsset(snapshot proto.NewAssetSnapshot) error {
+    buf := bytebufferpool.Get()
+
+    // Static asset info: asset_id || issuer || decimals || is_nft
+    if _, err := buf.Write(snapshot.AssetID[:]); err != nil {
+        return err
+    }
+    if _, err := buf.Write(snapshot.IssuerPublicKey[:]); err != nil {
+        return err
+    }
+    if err := buf.WriteByte(snapshot.Decimals); err != nil {
+        return err
+    }
+    if err := writeBool(buf, snapshot.IsNFT); err != nil {
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplyAssetDescription(snapshot proto.AssetDescriptionSnapshot) error {
+    if h.blockHeight == 0 { // sanity check
+        return errors.New("failed to apply asset description snapshot: block height is not set")
+    }
+    buf := bytebufferpool.Get()
+
+    // Asset name and description: asset_id || name || description || change_height
+    if _, err := buf.Write(snapshot.AssetID[:]); err != nil {
+        return err
+    }
+    if _, err := buf.WriteString(snapshot.AssetName); err != nil { // we assume that string is valid UTF-8
+        return err
+    }
+    if _, err := buf.WriteString(snapshot.AssetDescription); err != nil { // we assume that string is valid UTF-8
+        return err
+    }
+    // in scala node the height is hashed as a 4-byte integer
+    if err := writeUint32BigEndian(buf, uint32(h.blockHeight)); err != nil {
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplyAssetVolume(snapshot proto.AssetVolumeSnapshot) error {
+    totalQuantityBytes := snapshot.TotalQuantity.Bytes() // here the number is represented in big-endian form
+    buf := bytebufferpool.Get()
+
+    // Asset reissuability: asset_id || is_reissuable || total_quantity
+    if _, err := buf.Write(snapshot.AssetID[:]); err != nil {
+        return err
+    }
+    if err := writeBool(buf, snapshot.IsReissuable); err != nil {
+        return err
+    }
+    if _, err := buf.Write(totalQuantityBytes); err != nil {
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplyAssetScript(snapshot proto.AssetScriptSnapshot) error {
+    buf := bytebufferpool.Get()
+
+    // Asset script: asset_id || script
+    if _, err := buf.Write(snapshot.AssetID[:]); err != nil {
+        return err
+    }
+    if _, err := buf.Write(snapshot.Script); err != nil {
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplySponsorship(snapshot proto.SponsorshipSnapshot) error {
+    buf := bytebufferpool.Get()
+
+    // Sponsorship: asset_id || min_sponsored_fee
+    if _, err := buf.Write(snapshot.AssetID[:]); err != nil {
+        return err
+    }
+    if err := writeUint64BigEndian(buf, snapshot.MinSponsoredFee); err != nil {
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplyAccountScript(snapshot proto.AccountScriptSnapshot) error {
+    if snapshot.Script.IsEmpty() {
+        buf := bytebufferpool.Get()
+
+        // Empty account script: sender_public_key
+
+        if _, err := buf.Write(snapshot.SenderPublicKey[:]); err != nil {
+            return err
+        }
+        h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+        return nil
+    }
+
+    buf := bytebufferpool.Get()
+
+    // Non-empty account script: sender_public_key || script || verifier_complexity
+    if _, err := buf.Write(snapshot.SenderPublicKey[:]); err != nil {
+        return err
+    }
+    if _, err := buf.Write(snapshot.Script); err != nil {
+        return err
+    }
+    if err := writeUint64BigEndian(buf, snapshot.VerifierComplexity); err != nil {
+        return err
+    }
+
+    h.hashEntries = append(h.hashEntries, hashEntry{data: buf})
+    return nil
+}
+
+func (h *txSnapshotHasher) ApplyFilledVolumeAndFee(snapshot proto.FilledVolumeFeeSnapshot) error {
+    buf := bytebufferpool.Get()
+
+    // Filled volume and fee: order_id ||
filled_volume || filled_fee + if _, err := buf.Write(snapshot.OrderID[:]); err != nil { + return err + } + if err := writeUint64BigEndian(buf, snapshot.FilledVolume); err != nil { + return err + } + if err := writeUint64BigEndian(buf, snapshot.FilledFee); err != nil { + return err + } + + h.hashEntries = append(h.hashEntries, hashEntry{data: buf}) + return nil +} + +func (h *txSnapshotHasher) ApplyDataEntries(snapshot proto.DataEntriesSnapshot) error { + for _, entry := range snapshot.DataEntries { + entryKey := entry.GetKey() + + buf := bytebufferpool.Get() + + // Data entries: address || key || data_entry + if _, err := buf.Write(snapshot.Address[:]); err != nil { + return err + } + if _, err := buf.WriteString(entryKey); err != nil { // we assume that string is valid UTF-8 + return err + } + if err := entry.WriteValueTo(buf); err != nil { + return err + } + + h.hashEntries = append(h.hashEntries, hashEntry{data: buf}) + } + return nil +} + +func (h *txSnapshotHasher) applyLeaseStatusHashEntry(leaseID crypto.Digest, isActive bool) error { + buf := bytebufferpool.Get() + + // Lease details: lease_id || is_active + if _, err := buf.Write(leaseID[:]); err != nil { + return err + } + if err := writeBool(buf, isActive); err != nil { + return err + } + + h.hashEntries = append(h.hashEntries, hashEntry{data: buf}) + return nil +} + +func (h *txSnapshotHasher) ApplyNewLease(snapshot proto.NewLeaseSnapshot) error { + buf := bytebufferpool.Get() + + // Lease details: lease_id || sender_public_key || recipient || amount + if _, err := buf.Write(snapshot.LeaseID[:]); err != nil { + return err + } + if _, err := buf.Write(snapshot.SenderPK[:]); err != nil { + return err + } + if _, err := buf.Write(snapshot.RecipientAddr[:]); err != nil { + return err + } + if err := writeUint64BigEndian(buf, snapshot.Amount); err != nil { + return err + } + + h.hashEntries = append(h.hashEntries, hashEntry{data: buf}) + return h.applyLeaseStatusHashEntry(snapshot.LeaseID, true) +} + +func (h *txSnapshotHasher) ApplyCancelledLease(snapshot proto.CancelledLeaseSnapshot) error { + return h.applyLeaseStatusHashEntry(snapshot.LeaseID, false) +} + +func (h *txSnapshotHasher) ApplyTransactionsStatus(snapshot proto.TransactionStatusSnapshot) error { + if len(h.transactionID) == 0 { // sanity check + return errors.New("failed to apply transaction status snapshot: transaction ID is not set") + } + // Application status is one byte, either 0x01 (script execution failed) or 0x02 (elided). 
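+	// Note that successful transactions are skipped in the switch below, so only failed and
+	// elided statuses ever contribute an entry to the snapshot state hash.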
+ var applicationStatus byte + switch v := snapshot.Status; v { + case proto.TransactionSucceeded: + return nil // don't hash transaction status snapshot in case of successful transaction + case proto.TransactionFailed: + applicationStatus = 1 + case proto.TransactionElided: + applicationStatus = 2 + default: + return errors.Errorf("invalid status value (%d) of TransactionStatus snapshot", v) + } + + buf := bytebufferpool.Get() + + // Non-successful transaction application status: tx_id || application_status + if _, err := buf.Write(h.transactionID); err != nil { + return err + } + if err := buf.WriteByte(applicationStatus); err != nil { + return err + } + + h.hashEntries = append(h.hashEntries, hashEntry{data: buf}) + return nil +} diff --git a/pkg/state/snapshot_hasher_internal_test.go b/pkg/state/snapshot_hasher_internal_test.go new file mode 100644 index 000000000..0361a077c --- /dev/null +++ b/pkg/state/snapshot_hasher_internal_test.go @@ -0,0 +1,238 @@ +package state + +import ( + "encoding/base64" + "encoding/hex" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/wavesplatform/gowaves/pkg/crypto" + g "github.com/wavesplatform/gowaves/pkg/grpc/generated/waves" + "github.com/wavesplatform/gowaves/pkg/proto" +) + +func TestTxSnapshotHasher(t *testing.T) { + const ( + scheme = proto.TestNetScheme + blockHeight = 10 + ) + testCases := []struct { + testCaseName string + pbInBase64 string + prevStateHashHex string + expectedStateHashHex string + transactionIDBase58 string + }{ + { + testCaseName: "waves_balances", + pbInBase64: "CiQKGgFUYP1Q7yDeRXEgffuciL58HC+KIscK2I+1EgYQgJTr3AMKJAoaAVRCxcljc/UP2BNQYE8cFPKmySVq2v0ZsCoSBhCAqNa5Bw==", //nolint:lll + prevStateHashHex: crypto.MustFastHash(nil).Hex(), + expectedStateHashHex: "f0a8b6745534c2d20412f40cdb097b7050898e44531a661ef64fc5be0744ac72", + transactionIDBase58: "", + }, + { + testCaseName: "asset_balances", + pbInBase64: "CkMKGgFUYP1Q7yDeRXEgffuciL58HC+KIscK2I+1EiUKIF5mn4IKZ9CIbYdHjPBDoqx4XMevVdwxzhB1OUvTUKJbEJBOCkQKGgFUQsXJY3P1D9gTUGBPHBTypsklatr9GbAqEiYKIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEKCcAQ==", //nolint:lll + prevStateHashHex: "f0a8b6745534c2d20412f40cdb097b7050898e44531a661ef64fc5be0744ac72", + expectedStateHashHex: "16c4803d12ee8e9d6c705ca6334fd84f57c0e78c4ed8a9a3dc6c28dcd9b29a34", + transactionIDBase58: "", + }, + { + testCaseName: "data_entries", + pbInBase64: "YloKGgFUYP1Q7yDeRXEgffuciL58HC+KIscK2I+1EgUKA2ZvbxISCgNiYXJqC1N0cmluZ1ZhbHVlEiEKA2JhemIaAVRg/VDvIN5FcSB9+5yIvnwcL4oixwrYj7ViLwoaAVRCxcljc/UP2BNQYE8cFPKmySVq2v0ZsCoSCAoDZm9vULAJEgcKA2JhclgB", //nolint:lll + prevStateHashHex: "16c4803d12ee8e9d6c705ca6334fd84f57c0e78c4ed8a9a3dc6c28dcd9b29a34", + expectedStateHashHex: "d33269372999bfd8f7afdf97e23bc343bcf3812f437e8971681a37d56868ec8a", + transactionIDBase58: "", + }, + { + testCaseName: "account_script", + pbInBase64: "Wi4KIFDHWa9Cd6VU8M20LLFHzbBTveERf1sEOw19SUS40GBoEgcGAQaw0U/PGPoB", + prevStateHashHex: "d33269372999bfd8f7afdf97e23bc343bcf3812f437e8971681a37d56868ec8a", + expectedStateHashHex: "dcdf7df91b11fdbeb2d99c4fd64abb4657adfda15eed63b1d4730aa2b6275ee2", + transactionIDBase58: "", + }, + { + testCaseName: "asset_script", + pbInBase64: "QisKIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEgcGAQaw0U/P", + prevStateHashHex: "dcdf7df91b11fdbeb2d99c4fd64abb4657adfda15eed63b1d4730aa2b6275ee2", + expectedStateHashHex: "d3c7f2aeb1d978ecebc2fe1f0555e4378cef5171db460d8bbfebef0e59c3a44c", + transactionIDBase58: "", + }, + { + testCaseName: "new_lease", + pbInBase64: 
"EiIKGgFUYP1Q7yDeRXEgffuciL58HC+KIscK2I+1GICa4uEQEiIKGgFUQsXJY3P1D9gTUGBPHBTypsklatr9GbAqEICuzb4UGmYKILiCMyyFggW8Zd2LGt/AtMr7WWp+kfWbzlN93pXZqzqNEiBQx1mvQnelVPDNtCyxR82wU73hEX9bBDsNfUlEuNBgaBoaAVRCxcljc/UP2BNQYE8cFPKmySVq2v0ZsCoggPKLqAk=", //nolint:lll + prevStateHashHex: "d3c7f2aeb1d978ecebc2fe1f0555e4378cef5171db460d8bbfebef0e59c3a44c", + expectedStateHashHex: "2665ce187b867f2dae95699882d9fd7c31039c505b8af93ed22cada90524ff37", + transactionIDBase58: "", + }, + { + testCaseName: "cancelled_lease", + pbInBase64: "EiIKGgFUMCPLqLW81X2Atgaj2KwF9QkaJq47Cev9GICo1rkHEhwKGgFUYSJd8vzI9rq7GdIuDy65JMc8zi497E98IiIKILiCMyyFggW8Zd2LGt/AtMr7WWp+kfWbzlN93pXZqzqN", //nolint:lll + prevStateHashHex: "2665ce187b867f2dae95699882d9fd7c31039c505b8af93ed22cada90524ff37", + expectedStateHashHex: "dafc56fb4f5e13ddd3e82547874e154c5c61ac556e76e9e9766b5d7ccbc1e1be", + transactionIDBase58: "", + }, + { + testCaseName: "sponsorship", + pbInBase64: "aiUKIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEPwq", + prevStateHashHex: "dafc56fb4f5e13ddd3e82547874e154c5c61ac556e76e9e9766b5d7ccbc1e1be", + expectedStateHashHex: "d9eab5091d57c18c38e0a8702e7cbe6f133e109281f2ef0f2bc88686b458f31f", + transactionIDBase58: "", + }, + { + testCaseName: "alias", + pbInBase64: "SiYKGgFUQsXJY3P1D9gTUGBPHBTypsklatr9GbAqEgh3YXZlc2V2bw==", + prevStateHashHex: "d9eab5091d57c18c38e0a8702e7cbe6f133e109281f2ef0f2bc88686b458f31f", + expectedStateHashHex: "eaa251c161cfe875932275ce6ff8873cd169099e021f09245f4069ccd58d6669", + transactionIDBase58: "", + }, + { + testCaseName: "order_fill", + pbInBase64: "UisKIMkknO8yHpMUT/XKkkdlrbYCG0Dt+qvVgphfgtRbyRDMEICU69wDGNAPUisKIJZ9YwvJObbWItHAD2zhbaFOTFx2zQ4p0Xbo81GXHKeEEICU69wDGNAP", //nolint:lll + prevStateHashHex: "eaa251c161cfe875932275ce6ff8873cd169099e021f09245f4069ccd58d6669", + expectedStateHashHex: "de22575b5c2ef7de6388c0ea96e6d0f172802f4c8e33684473c91af65866b1d4", + transactionIDBase58: "", + }, + { + testCaseName: "new_asset", + pbInBase64: "KkYKIF5mn4IKZ9CIbYdHjPBDoqx4XMevVdwxzhB1OUvTUKJbEiDcYGFqY9MotHTpDpskoycN/Mt62bZfPxIC4fpU0ZTBniABKkYKIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEiDcYGFqY9MotHTpDpskoycN/Mt62bZfPxIC4fpU0ZTBnhgIMi8KIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEAEaCQT/////////9jIlCiBeZp+CCmfQiG2HR4zwQ6KseFzHr1XcMc4QdTlL01CiWxoBAQ==", //nolint:lll + prevStateHashHex: "de22575b5c2ef7de6388c0ea96e6d0f172802f4c8e33684473c91af65866b1d4", + expectedStateHashHex: "5f09358e944a386ad12b4f6e22c79a5c614967f6da40465e30d878e9b58e75e2", + transactionIDBase58: "", + }, + { + testCaseName: "reissued_asset", + pbInBase64: "MigKIDhvjT3TTlJ+v4Ni205vcYc1m9WWgnQPFovjmJI1H62yGgQ7msoA", + prevStateHashHex: "5f09358e944a386ad12b4f6e22c79a5c614967f6da40465e30d878e9b58e75e2", + expectedStateHashHex: "6d5e0f4e2a4b650541b66711bbc687f51fea7bc3aa35b43642e21ab3dd064743", + transactionIDBase58: "", + }, + { + testCaseName: "renamed_asset", + pbInBase64: "OkMKIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEgduZXduYW1lGhZzb21lIGZhbmN5IGRlc2NyaXB0aW9u", + prevStateHashHex: "6d5e0f4e2a4b650541b66711bbc687f51fea7bc3aa35b43642e21ab3dd064743", + expectedStateHashHex: "885ac4b03397e63cdc1a2e3fe60d2aae0d4701e5cfb8c19ca80feb912a028a48", + transactionIDBase58: "", + }, + { + testCaseName: "failed_transaction", + pbInBase64: "CiQKGgFUQsXJY3P1D9gTUGBPHBTypsklatr9GbAqEgYQ4PHE1wlwAQ==", + prevStateHashHex: "885ac4b03397e63cdc1a2e3fe60d2aae0d4701e5cfb8c19ca80feb912a028a48", + expectedStateHashHex: "4185fb099c6dd4f483d4488045cc0912f02b9c292128b90142367af680ce2a32", + transactionIDBase58: "C6tHv5UkPaC53WFEr1Kv4Nb6q7hHdypDThjyYwRUUhQ8", + }, 
+ { + testCaseName: "elided_transaction", + pbInBase64: "cAI=", + prevStateHashHex: "4185fb099c6dd4f483d4488045cc0912f02b9c292128b90142367af680ce2a32", + expectedStateHashHex: "7a15507d73ff9f98c3c777e687e23a4c8b33d02212203be73f0518403e91d431", + transactionIDBase58: "Feix2sUAxsqhUH5kwRJBqdXur3Fj2StgCksbhdt67fXc", + }, + { + testCaseName: "all_together", + pbInBase64: "CkMKGgFUYP1Q7yDeRXEgffuciL58HC+KIscK2I+1EiUKIF5mn4IKZ9CIbYdHjPBDoqx4XMevVdwxzhB1OUvTUKJbEJBOCkQKGgFUQsXJY3P1D9gTUGBPHBTypsklatr9GbAqEiYKIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEKCcAQokChoBVGD9UO8g3kVxIH37nIi+fBwviiLHCtiPtRIGEICU69wDCiQKGgFUQsXJY3P1D9gTUGBPHBTypsklatr9GbAqEgYQgKjWuQcSIgoaAVRg/VDvIN5FcSB9+5yIvnwcL4oixwrYj7UYgJri4RASIgoaAVRCxcljc/UP2BNQYE8cFPKmySVq2v0ZsCoQgK7NvhQSIgoaAVQwI8uotbzVfYC2BqPYrAX1CRomrjsJ6/0YgKjWuQcSHAoaAVRhIl3y/Mj2ursZ0i4PLrkkxzzOLj3sT3waZgoguIIzLIWCBbxl3Ysa38C0yvtZan6R9ZvOU33eldmrOo0SIFDHWa9Cd6VU8M20LLFHzbBTveERf1sEOw19SUS40GBoGhoBVELFyWNz9Q/YE1BgTxwU8qbJJWra/RmwKiCA8ouoCSIiCiC4gjMshYIFvGXdixrfwLTK+1lqfpH1m85Tfd6V2as6jSpGCiBeZp+CCmfQiG2HR4zwQ6KseFzHr1XcMc4QdTlL01CiWxIg3GBhamPTKLR06Q6bJKMnDfzLetm2Xz8SAuH6VNGUwZ4gASpGCiB4ncARI9U2D3CCr9S0ari/6LUWC7+1JsBD5fx4Ok+JThIg3GBhamPTKLR06Q6bJKMnDfzLetm2Xz8SAuH6VNGUwZ4YCDIvCiB4ncARI9U2D3CCr9S0ari/6LUWC7+1JsBD5fx4Ok+JThABGgkE//////////YyJQogXmafggpn0Ihth0eM8EOirHhcx69V3DHOEHU5S9NQolsaAQEyKAogOG+NPdNOUn6/g2LbTm9xhzWb1ZaCdA8Wi+OYkjUfrbIaBDuaygA6QwogeJ3AESPVNg9wgq/UtGq4v+i1Fgu/tSbAQ+X8eDpPiU4SB25ld25hbWUaFnNvbWUgZmFuY3kgZGVzY3JpcHRpb25KJgoaAVRCxcljc/UP2BNQYE8cFPKmySVq2v0ZsCoSCHdhdmVzZXZvUisKIMkknO8yHpMUT/XKkkdlrbYCG0Dt+qvVgphfgtRbyRDMEICU69wDGNAPUisKIJZ9YwvJObbWItHAD2zhbaFOTFx2zQ4p0Xbo81GXHKeEEICU69wDGNAPWi4KIFDHWa9Cd6VU8M20LLFHzbBTveERf1sEOw19SUS40GBoEgcGAQaw0U/PGPoBYloKGgFUYP1Q7yDeRXEgffuciL58HC+KIscK2I+1EgUKA2ZvbxISCgNiYXJqC1N0cmluZ1ZhbHVlEiEKA2JhemIaAVRg/VDvIN5FcSB9+5yIvnwcL4oixwrYj7ViLwoaAVRCxcljc/UP2BNQYE8cFPKmySVq2v0ZsCoSCAoDZm9vULAJEgcKA2JhclgBaiUKIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEPwqcAE=", //nolint:lll + prevStateHashHex: "7a15507d73ff9f98c3c777e687e23a4c8b33d02212203be73f0518403e91d431", + expectedStateHashHex: "6502773294f32cc1702d374ffc1e67ee278cd63c5f00432f80f64a689fcb17f9", + transactionIDBase58: "5gEi2kgbMSfUzdDXRKovEbEezq5ACpr8WTeafwkKQmHW", + }, + } + + hasher, hErr := newTxSnapshotHasherDefault() + require.NoError(t, hErr) + defer hasher.Release() + + for _, testCase := range testCases { + t.Run(testCase.testCaseName, func(t *testing.T) { + pbBytes, err := base64.StdEncoding.DecodeString(testCase.pbInBase64) + require.NoError(t, err) + + txSnapshotProto := new(g.TransactionStateSnapshot) + err = txSnapshotProto.UnmarshalVT(pbBytes) + require.NoError(t, err) + + prevHashBytes, err := hex.DecodeString(testCase.prevStateHashHex) + require.NoError(t, err) + prevHash, err := crypto.NewDigestFromBytes(prevHashBytes) + require.NoError(t, err) + + txSnapshot, err := proto.TxSnapshotsFromProtobuf(scheme, txSnapshotProto) + assert.NoError(t, err) + + var transactionID crypto.Digest + if txIDStr := testCase.transactionIDBase58; txIDStr != "" { + transactionID, err = crypto.NewDigestFromBase58(txIDStr) + require.NoError(t, err) + } + + hash, err := calculateTxSnapshotStateHash(hasher, transactionID.Bytes(), blockHeight, prevHash, txSnapshot) + require.NoError(t, err) + + assert.Equal(t, testCase.expectedStateHashHex, hash.Hex()) + }) + } +} + +func BenchmarkTxSnapshotHasher(b *testing.B) { + const ( + scheme = proto.TestNetScheme + blockHeight = 10 + ) + testCase := struct { + testCaseName string + pbInBase64 string + prevStateHashHex string + 
transactionIDBase58 string + expectedStateHashHex string + }{ + testCaseName: "all_together", + pbInBase64: "CkMKGgFUYP1Q7yDeRXEgffuciL58HC+KIscK2I+1EiUKIF5mn4IKZ9CIbYdHjPBDoqx4XMevVdwxzhB1OUvTUKJbEJBOCkQKGgFUQsXJY3P1D9gTUGBPHBTypsklatr9GbAqEiYKIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEKCcAQokChoBVGD9UO8g3kVxIH37nIi+fBwviiLHCtiPtRIGEICU69wDCiQKGgFUQsXJY3P1D9gTUGBPHBTypsklatr9GbAqEgYQgKjWuQcSIgoaAVRg/VDvIN5FcSB9+5yIvnwcL4oixwrYj7UYgJri4RASIgoaAVRCxcljc/UP2BNQYE8cFPKmySVq2v0ZsCoQgK7NvhQSIgoaAVQwI8uotbzVfYC2BqPYrAX1CRomrjsJ6/0YgKjWuQcSHAoaAVRhIl3y/Mj2ursZ0i4PLrkkxzzOLj3sT3waZgoguIIzLIWCBbxl3Ysa38C0yvtZan6R9ZvOU33eldmrOo0SIFDHWa9Cd6VU8M20LLFHzbBTveERf1sEOw19SUS40GBoGhoBVELFyWNz9Q/YE1BgTxwU8qbJJWra/RmwKiCA8ouoCSIiCiC4gjMshYIFvGXdixrfwLTK+1lqfpH1m85Tfd6V2as6jSpGCiBeZp+CCmfQiG2HR4zwQ6KseFzHr1XcMc4QdTlL01CiWxIg3GBhamPTKLR06Q6bJKMnDfzLetm2Xz8SAuH6VNGUwZ4gASpGCiB4ncARI9U2D3CCr9S0ari/6LUWC7+1JsBD5fx4Ok+JThIg3GBhamPTKLR06Q6bJKMnDfzLetm2Xz8SAuH6VNGUwZ4YCDIvCiB4ncARI9U2D3CCr9S0ari/6LUWC7+1JsBD5fx4Ok+JThABGgkE//////////YyJQogXmafggpn0Ihth0eM8EOirHhcx69V3DHOEHU5S9NQolsaAQEyKAogOG+NPdNOUn6/g2LbTm9xhzWb1ZaCdA8Wi+OYkjUfrbIaBDuaygA6QwogeJ3AESPVNg9wgq/UtGq4v+i1Fgu/tSbAQ+X8eDpPiU4SB25ld25hbWUaFnNvbWUgZmFuY3kgZGVzY3JpcHRpb25KJgoaAVRCxcljc/UP2BNQYE8cFPKmySVq2v0ZsCoSCHdhdmVzZXZvUisKIMkknO8yHpMUT/XKkkdlrbYCG0Dt+qvVgphfgtRbyRDMEICU69wDGNAPUisKIJZ9YwvJObbWItHAD2zhbaFOTFx2zQ4p0Xbo81GXHKeEEICU69wDGNAPWi4KIFDHWa9Cd6VU8M20LLFHzbBTveERf1sEOw19SUS40GBoEgcGAQaw0U/PGPoBYloKGgFUYP1Q7yDeRXEgffuciL58HC+KIscK2I+1EgUKA2ZvbxISCgNiYXJqC1N0cmluZ1ZhbHVlEiEKA2JhemIaAVRg/VDvIN5FcSB9+5yIvnwcL4oixwrYj7ViLwoaAVRCxcljc/UP2BNQYE8cFPKmySVq2v0ZsCoSCAoDZm9vULAJEgcKA2JhclgBaiUKIHidwBEj1TYPcIKv1LRquL/otRYLv7UmwEPl/Hg6T4lOEPwqcAE=", //nolint:lll + prevStateHashHex: "7a15507d73ff9f98c3c777e687e23a4c8b33d02212203be73f0518403e91d431", + transactionIDBase58: "5gEi2kgbMSfUzdDXRKovEbEezq5ACpr8WTeafwkKQmHW", + expectedStateHashHex: "6502773294f32cc1702d374ffc1e67ee278cd63c5f00432f80f64a689fcb17f9", + } + pbBytes, err := base64.StdEncoding.DecodeString(testCase.pbInBase64) + require.NoError(b, err) + + txSnapshotProto := new(g.TransactionStateSnapshot) + err = txSnapshotProto.UnmarshalVT(pbBytes) + require.NoError(b, err) + + prevHashBytes, err := hex.DecodeString(testCase.prevStateHashHex) + require.NoError(b, err) + prevHash, err := crypto.NewDigestFromBytes(prevHashBytes) + require.NoError(b, err) + + txSnapshot, err := proto.TxSnapshotsFromProtobuf(scheme, txSnapshotProto) + assert.NoError(b, err) + + transactionID, err := crypto.NewDigestFromBase58(testCase.transactionIDBase58) + require.NoError(b, err) + txID := transactionID.Bytes() + + expectedHashBytes, err := hex.DecodeString(testCase.expectedStateHashHex) + require.NoError(b, err) + expectedHash, err := crypto.NewDigestFromBytes(expectedHashBytes) + require.NoError(b, err) + + hasher, err := newTxSnapshotHasherDefault() + require.NoError(b, err) + defer hasher.Release() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + b.Run(testCase.testCaseName, func(b *testing.B) { + b.ReportAllocs() + for j := 0; j < b.N; j++ { + h, hErr := calculateTxSnapshotStateHash(hasher, txID, blockHeight, prevHash, txSnapshot) + if hErr != nil { + b.Fatalf("error occured: %+v", err) + } + if h != expectedHash { + b.Fatalf("expectedHash=%s != actual=%s", expectedHash.Hex(), h.Hex()) + } + } + }) + } +} diff --git a/pkg/state/state.go b/pkg/state/state.go index 1fe59b3b8..3e4b15161 100644 --- a/pkg/state/state.go +++ b/pkg/state/state.go @@ -117,7 +117,7 @@ func (s *blockchainEntitiesStorage) putStateHash(prevHash 
[]byte, height uint64, if err := sh.GenerateSumHash(prevHash); err != nil { return nil, err } - if err := s.stateHashes.saveStateHash(sh, height); err != nil { + if err := s.stateHashes.saveLegacyStateHash(sh, height); err != nil { return nil, err } return sh, nil @@ -145,7 +145,7 @@ func (s *blockchainEntitiesStorage) prepareHashes() error { return nil } -func (s *blockchainEntitiesStorage) handleStateHashes(blockchainHeight uint64, blockIds []proto.BlockID) error { +func (s *blockchainEntitiesStorage) handleLegacyStateHashes(blockchainHeight uint64, blockIds []proto.BlockID) error { if !s.calculateHashes { return nil } @@ -156,7 +156,7 @@ func (s *blockchainEntitiesStorage) handleStateHashes(blockchainHeight uint64, b if err := s.prepareHashes(); err != nil { return err } - prevHash, err := s.stateHashes.stateHash(blockchainHeight) + prevHash, err := s.stateHashes.legacyStateHash(blockchainHeight) if err != nil { return err } @@ -589,9 +589,15 @@ func (s *stateManager) Map(func(State) error) error { func (s *stateManager) addGenesisBlock() error { ctx, cancel := context.WithCancel(context.Background()) defer cancel() + + initSH, shErr := crypto.FastHash(nil) // zero/initial snapshot state hash according to the specification + if shErr != nil { + return shErr + } + chans := launchVerifier(ctx, s.verificationGoroutinesNum, s.settings.AddressSchemeCharacter) - if err := s.addNewBlock(s.genesis, nil, chans, 0); err != nil { + if err := s.addNewBlock(s.genesis, nil, chans, 0, initSH); err != nil { return err } if err := s.stor.hitSources.appendBlockHitSource(s.genesis, 1, s.genesis.GenSignature); err != nil { @@ -1059,8 +1065,13 @@ func (s *stateManager) addRewardVote(block *proto.Block, height uint64) error { return s.stor.monetaryPolicy.vote(block.RewardVote, height, activation, isCappedRewardsActivated, block.BlockID()) } -func (s *stateManager) addNewBlock(block, parent *proto.Block, chans *verifierChans, height uint64) error { - blockHeight := height + 1 +func (s *stateManager) addNewBlock( + block, parent *proto.Block, + chans *verifierChans, + blockchainHeight uint64, + lastSnapshotStateHash crypto.Digest, +) error { + blockHeight := blockchainHeight + 1 // Add score. if err := s.stor.scores.appendBlockScore(block, blockHeight); err != nil { return err @@ -1082,11 +1093,12 @@ func (s *stateManager) addNewBlock(block, parent *proto.Block, chans *verifierCh parentHeader = &parent.BlockHeader } params := &appendBlockParams{ - transactions: transactions, - chans: chans, - block: &block.BlockHeader, - parent: parentHeader, - height: height, + transactions: transactions, + chans: chans, + block: &block.BlockHeader, + parent: parentHeader, + blockchainHeight: blockchainHeight, + lastSnapshotStateHash: lastSnapshotStateHash, } // Check and perform block's transactions, create balance diffs, write transactions to storage. if err := s.appender.appendBlock(params); err != nil { @@ -1512,12 +1524,18 @@ func (s *stateManager) addBlocks() (*proto.Block, error) { if err != nil { return nil, err } + sh, err := s.stor.stateHashes.newestSnapshotStateHash(blockchainCurHeight) + if err != nil { + return nil, errors.Wrapf(err, "failed to get newest snapshot state hash for height %d", + blockchainCurHeight, + ) + } if err := s.stor.hitSources.appendBlockHitSource(block, blockchainCurHeight+1, hs); err != nil { return nil, err } // Save block to storage, check its transactions, create and save balance diffs for its transactions. 
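+		// The newest snapshot state hash fetched above ('sh') is handed to addNewBlock together with the block.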
- if err := s.addNewBlock(block, lastAppliedBlock, chans, blockchainCurHeight); err != nil { - return nil, err + if addErr := s.addNewBlock(block, lastAppliedBlock, chans, blockchainCurHeight, sh); addErr != nil { + return nil, addErr } if s.needToFinishVotingPeriod(blockchainCurHeight + 1) { @@ -1542,9 +1560,9 @@ func (s *stateManager) addBlocks() (*proto.Block, error) { return nil, err } - // Retrieve and store state hashes for each of new blocks. - if err := s.stor.handleStateHashes(height, ids); err != nil { - return nil, wrapErr(ModificationError, err) + // Retrieve and store legacy state hashes for each of new blocks. + if shErr := s.stor.handleLegacyStateHashes(height, ids); shErr != nil { + return nil, wrapErr(ModificationError, shErr) } // Validate consensus (i.e. that all the new blocks were mined fairly). if err := s.cv.ValidateHeadersBatch(headers[:pos], height); err != nil { @@ -2479,7 +2497,7 @@ func (s *stateManager) ProvidesStateHashes() (bool, error) { return provides, nil } -func (s *stateManager) StateHashAtHeight(height uint64) (*proto.StateHash, error) { +func (s *stateManager) LegacyStateHashAtHeight(height proto.Height) (*proto.StateHash, error) { hasData, err := s.ProvidesStateHashes() if err != nil { return nil, wrapErr(Other, err) @@ -2487,13 +2505,21 @@ func (s *stateManager) StateHashAtHeight(height uint64) (*proto.StateHash, error if !hasData { return nil, wrapErr(IncompatibilityError, errors.New("state does not have data for state hashes")) } - sh, err := s.stor.stateHashes.stateHash(height) + sh, err := s.stor.stateHashes.legacyStateHash(height) if err != nil { return nil, wrapErr(RetrievalError, err) } return sh, nil } +func (s *stateManager) SnapshotStateHashAtHeight(height proto.Height) (crypto.Digest, error) { + sh, err := s.stor.stateHashes.snapshotStateHash(height) + if err != nil { + return crypto.Digest{}, wrapErr(RetrievalError, err) + } + return sh, nil +} + func (s *stateManager) IsNotFound(err error) bool { return IsNotFound(err) } diff --git a/pkg/state/state_hashes.go b/pkg/state/state_hashes.go index 20d22ebf8..604ba4be0 100644 --- a/pkg/state/state_hashes.go +++ b/pkg/state/state_hashes.go @@ -1,6 +1,7 @@ package state import ( + "github.com/wavesplatform/gowaves/pkg/crypto" "github.com/wavesplatform/gowaves/pkg/proto" ) @@ -12,13 +13,13 @@ func newStateHashes(hs *historyStorage) *stateHashes { return &stateHashes{hs} } -func (s *stateHashes) saveStateHash(sh *proto.StateHash, height uint64) error { - key := stateHashKey{height: height} - return s.hs.addNewEntry(stateHash, key.bytes(), sh.MarshalBinary(), sh.BlockID) +func (s *stateHashes) saveLegacyStateHash(sh *proto.StateHash, height proto.Height) error { + key := legacyStateHashKey{height: height} + return s.hs.addNewEntry(legacyStateHash, key.bytes(), sh.MarshalBinary(), sh.BlockID) } -func (s *stateHashes) stateHash(height uint64) (*proto.StateHash, error) { - key := stateHashKey{height: height} +func (s *stateHashes) legacyStateHash(height proto.Height) (*proto.StateHash, error) { + key := legacyStateHashKey{height: height} stateHashBytes, err := s.hs.topEntryData(key.bytes()) if err != nil { return nil, err @@ -29,3 +30,26 @@ func (s *stateHashes) stateHash(height uint64) (*proto.StateHash, error) { } return &sh, nil } + +func (s *stateHashes) saveSnapshotStateHash(sh crypto.Digest, height proto.Height, blockID proto.BlockID) error { + key := snapshotStateHashKey{height: height} + return s.hs.addNewEntry(snapshotStateHash, key.bytes(), sh.Bytes(), blockID) +} + +func (s *stateHashes) 
newestSnapshotStateHash(height proto.Height) (crypto.Digest, error) { + key := snapshotStateHashKey{height: height} + stateHashBytes, err := s.hs.newestTopEntryData(key.bytes()) + if err != nil { + return crypto.Digest{}, err + } + return crypto.NewDigestFromBytes(stateHashBytes) +} + +func (s *stateHashes) snapshotStateHash(height proto.Height) (crypto.Digest, error) { + key := snapshotStateHashKey{height: height} + stateHashBytes, err := s.hs.topEntryData(key.bytes()) + if err != nil { + return crypto.Digest{}, err + } + return crypto.NewDigestFromBytes(stateHashBytes) +} diff --git a/pkg/state/state_test.go b/pkg/state/state_test.go index bb89b685b..63babe104 100644 --- a/pkg/state/state_test.go +++ b/pkg/state/state_test.go @@ -393,8 +393,8 @@ func TestGenesisStateHash(t *testing.T) { manager := newTestStateManager(t, true, params, settings.MainNetSettings) - stateHash, err := manager.StateHashAtHeight(1) - assert.NoError(t, err, "StateHashAtHeight failed") + stateHash, err := manager.LegacyStateHashAtHeight(1) + assert.NoError(t, err, "LegacyStateHashAtHeight failed") var correctHashJs = ` {"sponsorshipHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","blockId":"FSH8eAAzZNqnG8xgTZtz5xuLqXySsXgAjmFEC25hXMbEufiGjqWPnGCZFt6gLiVLJny16ipxRNAkkzjjhqTjBE2","wavesBalanceHash":"211af58aa42c72d0cf546d11d7b9141a00c8394e0f5da2d8e7e9f4ba30e9ad37","accountScriptHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","aliasHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","stateHash":"fab947262e8f5f03807ee7a888c750e46d0544a04d5777f50cc6daaf5f4e8d19","leaseStatusHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","dataEntryHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","assetBalanceHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","assetScriptHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","leaseBalanceHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8"}` var correctHash proto.StateHash @@ -412,8 +412,8 @@ func TestStateHashAtHeight(t *testing.T) { assert.NoError(t, err) err = importer.ApplyFromFile(manager, blocksPath, 9499, 1) assert.NoError(t, err, "ApplyFromFile() failed") - stateHash, err := manager.StateHashAtHeight(9500) - assert.NoError(t, err, "StateHashAtHeight failed") + stateHash, err := manager.LegacyStateHashAtHeight(9500) + assert.NoError(t, err, "LegacyStateHashAtHeight failed") var correctHashJs = ` {"sponsorshipHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","blockId":"2DYapXXAwxPm9WdYjS6bAY2n2fokGWeKmvHrcJy26uDfCFMognrwNEdtWEixaDxx3AahDKcdTDRNXmPVEtVumKjY","wavesBalanceHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","accountScriptHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","aliasHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","stateHash":"df48986cfee70960c977d741146ef4980ca71b20401db663eeff72c332fd8825","leaseStatusHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","dataEntryHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","assetBalanceHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","assetScriptHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8","leaseBalanceHash":"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8"}` var correctHash proto.StateHash diff --git a/pkg/state/threadsafe_wrapper.go 
b/pkg/state/threadsafe_wrapper.go index f2cadc502..82ac60372 100644 --- a/pkg/state/threadsafe_wrapper.go +++ b/pkg/state/threadsafe_wrapper.go @@ -338,10 +338,16 @@ func (a *ThreadSafeReadWrapper) ProvidesStateHashes() (bool, error) { return a.s.ProvidesStateHashes() } -func (a *ThreadSafeReadWrapper) StateHashAtHeight(height uint64) (*proto.StateHash, error) { +func (a *ThreadSafeReadWrapper) LegacyStateHashAtHeight(height uint64) (*proto.StateHash, error) { a.mu.RLock() defer a.mu.RUnlock() - return a.s.StateHashAtHeight(height) + return a.s.LegacyStateHashAtHeight(height) +} + +func (a *ThreadSafeReadWrapper) SnapshotStateHashAtHeight(height proto.Height) (crypto.Digest, error) { + a.mu.RLock() + defer a.mu.RUnlock() + return a.s.SnapshotStateHashAtHeight(height) } func (a *ThreadSafeReadWrapper) ProvidesExtendedApi() (bool, error) { diff --git a/pkg/state/transaction_checker.go b/pkg/state/transaction_checker.go index c2c39c149..614695c2d 100644 --- a/pkg/state/transaction_checker.go +++ b/pkg/state/transaction_checker.go @@ -29,7 +29,7 @@ type checkerInfo struct { parentTimestamp uint64 blockID proto.BlockID blockVersion proto.BlockVersion - height uint64 + blockchainHeight proto.Height rideV5Activated bool rideV6Activated bool blockRewardDistribution bool @@ -343,7 +343,7 @@ func (tc *transactionChecker) checkGenesis(transaction proto.Transaction, info * if info.blockID != tc.genesis { return out, errors.New("genesis transaction inside of non-genesis block") } - if info.height != 0 { + if info.blockchainHeight != 0 { return out, errors.New("genesis transaction on non zero height") } assets := &txAssets{feeAsset: proto.NewOptionalAssetWaves()} @@ -358,7 +358,7 @@ func (tc *transactionChecker) checkPayment(transaction proto.Transaction, info * if !ok { return out, errors.New("failed to convert interface to Payment transaction") } - if info.height >= tc.settings.BlockVersion3AfterHeight { + if info.blockchainHeight >= tc.settings.BlockVersion3AfterHeight { return out, errors.Errorf("Payment transaction is deprecated after height %d", tc.settings.BlockVersion3AfterHeight) } if err := tc.checkTimestamps(tx.Timestamp, info.currentTimestamp, info.parentTimestamp); err != nil { @@ -471,7 +471,7 @@ func (tc *transactionChecker) checkEthereumTransactionWithProofs(transaction pro paymentAssets := make([]proto.OptionalAsset, 0, len(abiPayments)) for _, p := range abiPayments { - if p.Amount <= 0 && info.height > tc.settings.InvokeNoZeroPaymentsAfterHeight { + if p.Amount <= 0 && info.blockchainHeight > tc.settings.InvokeNoZeroPaymentsAfterHeight { return out, errors.Errorf("invalid payment amount '%d'", p.Amount) } optAsset := proto.NewOptionalAsset(p.PresentAssetID, p.AssetID) @@ -1518,7 +1518,7 @@ func (tc *transactionChecker) checkUpdateAssetInfoWithProofs(transaction proto.T return out, errs.Extend(err, "failed to retrieve last update height") } updateAllowedAt := lastUpdateHeight + tc.settings.MinUpdateAssetInfoInterval - blockHeight := info.height + 1 + blockHeight := info.blockchainHeight + 1 if blockHeight < updateAllowedAt { return out, errs.NewAssetUpdateInterval(fmt.Sprintf("Can't update info of asset with id=%s before height %d, current height is %d", tx.AssetID.String(), updateAllowedAt, blockHeight)) } diff --git a/pkg/state/transaction_checker_test.go b/pkg/state/transaction_checker_test.go index 44bc2d8ed..d66927a09 100644 --- a/pkg/state/transaction_checker_test.go +++ b/pkg/state/transaction_checker_test.go @@ -22,10 +22,9 @@ var ( ) type checkerTestObjects struct { - stor 
*testStorageObjects - tc *transactionChecker - tp *transactionPerformer - stateActionsCounter *proto.StateActionsCounter + stor *testStorageObjects + tc *transactionChecker + tp *transactionPerformer } func createCheckerTestObjects(t *testing.T, checkerInfo *checkerInfo) *checkerTestObjects { @@ -45,7 +44,7 @@ func createCheckerTestObjects(t *testing.T, checkerInfo *checkerInfo) *checkerTe snapshotGen := newSnapshotGenerator(stor.entities, settings.MainNetSettings.AddressSchemeCharacter) tp := newTransactionPerformer(stor.entities, settings.MainNetSettings, &snapshotGen, &snapshotApplier) - return &checkerTestObjects{stor, tc, tp, actionsCounter} + return &checkerTestObjects{stor, tc, tp} } func defaultCheckerInfo() *checkerInfo { @@ -54,7 +53,7 @@ func defaultCheckerInfo() *checkerInfo { parentTimestamp: defaultTimestamp - settings.MainNetSettings.MaxTxTimeBackOffset/2, blockID: blockID0, blockVersion: 1, - height: 100500, + blockchainHeight: 100500, } } @@ -72,7 +71,7 @@ func TestCheckGenesis(t *testing.T) { _, err = to.tc.checkGenesis(tx, info) assert.EqualError(t, err, "genesis transaction on non zero height") - info.height = 0 + info.blockchainHeight = 0 _, err = to.tc.checkGenesis(tx, info) assert.NoError(t, err, "checkGenesis failed in non-initialisation mode") @@ -87,10 +86,10 @@ func TestCheckPayment(t *testing.T) { tx := createPayment(t) - info.height = settings.MainNetSettings.BlockVersion3AfterHeight + info.blockchainHeight = settings.MainNetSettings.BlockVersion3AfterHeight _, err := to.tc.checkPayment(tx, info) assert.Error(t, err, "checkPayment accepted payment tx after Block v3 height") - info.height = 10 + info.blockchainHeight = 10 _, err = to.tc.checkPayment(tx, info) assert.NoError(t, err, "checkPayment failed with valid payment tx") @@ -264,7 +263,7 @@ func TestCheckReissueWithSig(t *testing.T) { tx.SenderPK = assetInfo.issuer tx.Reissuable = false - _, err = to.tp.performReissueWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performReissueWithSig(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performReissueWithSig failed") to.stor.addBlock(t, blockID0) to.stor.flush(t) @@ -313,7 +312,7 @@ func TestCheckReissueWithProofs(t *testing.T) { tx.SenderPK = assetInfo.issuer tx.Reissuable = false - _, err = to.tp.performReissueWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performReissueWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performReissueWithProofs failed") to.stor.addBlock(t, blockID0) to.stor.flush(t) @@ -646,7 +645,7 @@ func TestCheckLeaseCancelWithSig(t *testing.T) { assert.Error(t, err, "checkLeaseCancelWithSig did not fail when cancelling nonexistent lease") to.stor.addBlock(t, blockID0) - _, err = to.tp.performLeaseWithSig(leaseTx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performLeaseWithSig(leaseTx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performLeaseWithSig failed") to.stor.flush(t) @@ -675,7 +674,7 @@ func TestCheckLeaseCancelWithProofs(t *testing.T) { assert.Error(t, err, "checkLeaseCancelWithProofs did not fail when cancelling nonexistent lease") to.stor.addBlock(t, blockID0) - _, err = to.tp.performLeaseWithProofs(leaseTx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performLeaseWithProofs(leaseTx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performLeaseWithProofs failed") to.stor.flush(t) @@ -691,7 +690,7 @@ func TestCheckLeaseCancelWithProofs(t 
*testing.T) { _, err = to.tc.checkLeaseCancelWithProofs(tx, info) assert.NoError(t, err, "checkLeaseCancelWithProofs failed with valid leaseCancel tx") - _, err = to.tp.performLeaseCancelWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performLeaseCancelWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performLeaseCancelWithProofs() failed") _, err = to.tc.checkLeaseCancelWithProofs(tx, info) @@ -708,7 +707,7 @@ func TestCheckCreateAliasWithSig(t *testing.T) { assert.NoError(t, err, "checkCreateAliasWithSig failed with valid createAlias tx") to.stor.addBlock(t, blockID0) - _, err = to.tp.performCreateAliasWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performCreateAliasWithSig(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performCreateAliasWithSig failed") to.stor.flush(t) @@ -736,7 +735,7 @@ func TestCheckCreateAliasWithProofs(t *testing.T) { assert.NoError(t, err, "checkCreateAliasWithProofs failed with valid createAlias tx") to.stor.addBlock(t, blockID0) - _, err = to.tp.performCreateAliasWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performCreateAliasWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performCreateAliasWithProofs failed") to.stor.flush(t) @@ -1455,7 +1454,7 @@ func TestCheckUpdateAssetInfoWithProofs(t *testing.T) { to.stor.createAsset(t, tx.FeeAsset.ID) tx.SenderPK = assetInfo.issuer - info.height = 100001 + info.blockchainHeight = 100001 // Check fail prior to activation. _, err := to.tc.checkUpdateAssetInfoWithProofs(tx, info) @@ -1480,9 +1479,11 @@ func TestCheckUpdateAssetInfoWithProofs(t *testing.T) { assert.EqualError(t, err, "asset was issued by other address") tx.SenderPK = assetInfo.issuer - info.height = 99999 + info.blockchainHeight = 99999 _, err = to.tc.checkUpdateAssetInfoWithProofs(tx, info) - correctError := fmt.Sprintf("Can't update info of asset with id=%s before height %d, current height is %d", tx.AssetID.String(), 1+to.tc.settings.MinUpdateAssetInfoInterval, info.height+1) + correctError := fmt.Sprintf("Can't update info of asset with id=%s before height %d, current height is %d", + tx.AssetID.String(), 1+to.tc.settings.MinUpdateAssetInfoInterval, info.blockchainHeight+1, + ) assert.EqualError(t, err, correctError) } diff --git a/pkg/state/transaction_differ_test.go b/pkg/state/transaction_differ_test.go index 72a7031de..d498bd98a 100644 --- a/pkg/state/transaction_differ_test.go +++ b/pkg/state/transaction_differ_test.go @@ -27,10 +27,9 @@ var ( ) type differTestObjects struct { - stor *testStorageObjects - td *transactionDiffer - tp *transactionPerformer - stateActionsCounter *proto.StateActionsCounter + stor *testStorageObjects + td *transactionDiffer + tp *transactionPerformer } func createDifferTestObjects(t *testing.T, checkerInfo *checkerInfo) *differTestObjects { @@ -51,7 +50,7 @@ func createDifferTestObjects(t *testing.T, checkerInfo *checkerInfo) *differTest snapshotGen := newSnapshotGenerator(stor.entities, settings.MainNetSettings.AddressSchemeCharacter) tp := newTransactionPerformer(stor.entities, settings.MainNetSettings, &snapshotGen, &snapshotApplier) require.NoError(t, err, "newTransactionPerformer() failed") - return &differTestObjects{stor, td, tp, actionsCounter} + return &differTestObjects{stor, td, tp} } func createGenesis() *proto.Genesis { @@ -895,7 +894,7 @@ func TestCreateDiffLeaseCancelWithSig(t *testing.T) { to := createDifferTestObjects(t, 
checkerInfo) leaseTx := createLeaseWithSig(t) - info := defaultPerformerInfo(to.stateActionsCounter) + info := defaultPerformerInfo() to.stor.addBlock(t, blockID0) _, err := to.tp.performLeaseWithSig(leaseTx, info, nil, nil) assert.NoError(t, err, "performLeaseWithSig failed") @@ -929,7 +928,7 @@ func TestCreateDiffLeaseCancelWithProofs(t *testing.T) { to := createDifferTestObjects(t, checkerInfo) leaseTx := createLeaseWithProofs(t) - info := defaultPerformerInfo(to.stateActionsCounter) + info := defaultPerformerInfo() to.stor.addBlock(t, blockID0) _, err := to.tp.performLeaseWithProofs(leaseTx, info, nil, nil) assert.NoError(t, err, "performLeaseWithProofs failed") diff --git a/pkg/state/transaction_performer.go b/pkg/state/transaction_performer.go index a03c3e7ba..c129d6f49 100644 --- a/pkg/state/transaction_performer.go +++ b/pkg/state/transaction_performer.go @@ -11,19 +11,26 @@ import ( ) type performerInfo struct { - height uint64 + blockchainHeight proto.Height blockID proto.BlockID currentMinerAddress proto.WavesAddress - stateActionsCounter *proto.StateActionsCounter checkerData txCheckerData } -func newPerformerInfo(height proto.Height, stateActionsCounter *proto.StateActionsCounter, - blockID proto.BlockID, currentMinerAddress proto.WavesAddress, - checkerData txCheckerData) *performerInfo { - return &performerInfo{height, blockID, - currentMinerAddress, stateActionsCounter, - checkerData} // all fields must be initialized +func (i *performerInfo) blockHeight() proto.Height { return i.blockchainHeight + 1 } + +func newPerformerInfo( + blockchainHeight proto.Height, + blockID proto.BlockID, + currentMinerAddress proto.WavesAddress, + checkerData txCheckerData, +) *performerInfo { + return &performerInfo{ // all fields must be initialized + blockchainHeight, + blockID, + currentMinerAddress, + checkerData, + } } type transactionPerformer struct { @@ -101,7 +108,7 @@ func (tp *transactionPerformer) performIssue( scriptEstimation *scriptEstimation, script proto.Script, ) (txSnapshot, error) { - blockHeight := info.height + 1 + blockHeight := info.blockHeight() // Create new asset. assetInfo := &assetInfo{ assetConstInfo: assetConstInfo{ @@ -272,14 +279,15 @@ func (tp *transactionPerformer) performLease(tx *proto.Lease, txID *crypto.Diges } // Add leasing to lease state. 
l := &leasing{ - SenderPK: tx.SenderPK, - RecipientAddr: recipientAddr, - Amount: tx.Amount, - OriginHeight: info.height, - Status: LeaseActive, + SenderPK: tx.SenderPK, + RecipientAddr: recipientAddr, + Amount: tx.Amount, + OriginHeight: info.blockHeight(), + OriginTransactionID: txID, + Status: LeaseActive, } leaseID := *txID - snapshot, err := tp.snapshotGenerator.generateSnapshotForLeaseTx(l, leaseID, txID, balanceChanges) + snapshot, err := tp.snapshotGenerator.generateSnapshotForLeaseTx(l, leaseID, balanceChanges) if err != nil { return txSnapshot{}, err } @@ -306,7 +314,12 @@ func (tp *transactionPerformer) performLeaseWithProofs(transaction proto.Transac func (tp *transactionPerformer) performLeaseCancel(tx *proto.LeaseCancel, txID *crypto.Digest, info *performerInfo, balanceChanges txDiff) (txSnapshot, error) { - snapshot, err := tp.snapshotGenerator.generateSnapshotForLeaseCancelTx(txID, tx.LeaseID, info.height, balanceChanges) + snapshot, err := tp.snapshotGenerator.generateSnapshotForLeaseCancelTx( + txID, + tx.LeaseID, + info.blockHeight(), + balanceChanges, + ) if err != nil { return txSnapshot{}, err } diff --git a/pkg/state/transaction_performer_test.go b/pkg/state/transaction_performer_test.go index 7774f9182..7e9e8b137 100644 --- a/pkg/state/transaction_performer_test.go +++ b/pkg/state/transaction_performer_test.go @@ -15,9 +15,8 @@ import ( ) type performerTestObjects struct { - stor *testStorageObjects - tp *transactionPerformer - stateActionsCounter *proto.StateActionsCounter + stor *testStorageObjects + tp *transactionPerformer } func createPerformerTestObjects(t *testing.T, checkerInfo *checkerInfo) *performerTestObjects { @@ -36,12 +35,11 @@ func createPerformerTestObjects(t *testing.T, checkerInfo *checkerInfo) *perform tp := newTransactionPerformer(stor.entities, settings.MainNetSettings, &snapshotGen, &snapshotApplier) - return &performerTestObjects{stor, tp, actionsCounter} + return &performerTestObjects{stor, tp} } -func defaultPerformerInfo(stateActionsCounter *proto.StateActionsCounter) *performerInfo { - _ = stateActionsCounter - return newPerformerInfo(0, stateActionsCounter, blockID0, proto.WavesAddress{}, txCheckerData{}) +func defaultPerformerInfo() *performerInfo { + return newPerformerInfo(0, blockID0, proto.WavesAddress{}, txCheckerData{}) } func defaultCheckerInfoHeight0() *checkerInfo { @@ -50,7 +48,7 @@ func defaultCheckerInfoHeight0() *checkerInfo { parentTimestamp: defaultTimestamp - settings.MainNetSettings.MaxTxTimeBackOffset/2, blockID: blockID0, blockVersion: 1, - height: 0, + blockchainHeight: 0, } } @@ -59,7 +57,7 @@ func TestPerformIssueWithSig(t *testing.T) { to := createPerformerTestObjects(t, checkerInfo) to.stor.addBlock(t, blockID0) tx := createIssueWithSig(t, 1000) - _, err := to.tp.performIssueWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performIssueWithSig(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performIssueWithSig() failed") to.stor.flush(t) expectedAssetInfo := assetInfo{ @@ -91,7 +89,7 @@ func TestPerformIssueWithProofs(t *testing.T) { to.stor.addBlock(t, blockID0) tx := createIssueWithProofs(t, 1000) - _, err := to.tp.performIssueWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performIssueWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performIssueWithProofs() failed") to.stor.flush(t) expectedAssetInfo := assetInfo{ @@ -124,7 +122,7 @@ func TestPerformReissueWithSig(t *testing.T) { assetInfo := 
to.stor.createAsset(t, testGlobal.asset0.asset.ID) tx := createReissueWithSig(t, 1000) - _, err := to.tp.performReissueWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performReissueWithSig(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performReissueWithSig() failed") to.stor.flush(t) assetInfo.reissuable = tx.Reissuable @@ -142,7 +140,7 @@ func TestPerformReissueWithProofs(t *testing.T) { assetInfo := to.stor.createAsset(t, testGlobal.asset0.asset.ID) tx := createReissueWithProofs(t, 1000) - _, err := to.tp.performReissueWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performReissueWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performReissueWithProofs() failed") to.stor.flush(t) assetInfo.reissuable = tx.Reissuable @@ -160,7 +158,7 @@ func TestPerformBurnWithSig(t *testing.T) { assetInfo := to.stor.createAsset(t, testGlobal.asset0.asset.ID) tx := createBurnWithSig(t) - _, err := to.tp.performBurnWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performBurnWithSig(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performBurnWithSig() failed") to.stor.flush(t) assetInfo.quantity.Sub(&assetInfo.quantity, big.NewInt(int64(tx.Amount))) @@ -177,7 +175,7 @@ func TestPerformBurnWithProofs(t *testing.T) { assetInfo := to.stor.createAsset(t, testGlobal.asset0.asset.ID) tx := createBurnWithProofs(t) - _, err := to.tp.performBurnWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performBurnWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performBurnWithProofs() failed") to.stor.flush(t) assetInfo.quantity.Sub(&assetInfo.quantity, big.NewInt(int64(tx.Amount))) @@ -194,7 +192,7 @@ func TestPerformExchange(t *testing.T) { to.stor.addBlock(t, blockID0) tx := createExchangeWithSig(t) - _, err := to.tp.performExchange(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performExchange(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performExchange() failed") sellOrderID, err := tx.GetOrder2().GetID() @@ -232,11 +230,13 @@ func TestPerformLeaseWithSig(t *testing.T) { to.stor.addBlock(t, blockID0) tx := createLeaseWithSig(t) - _, err := to.tp.performLeaseWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + pi := defaultPerformerInfo() + _, err := to.tp.performLeaseWithSig(tx, pi, nil, nil) assert.NoError(t, err, "performLeaseWithSig() failed") to.stor.flush(t) leasingInfo := &leasing{ OriginTransactionID: tx.ID, + OriginHeight: pi.blockHeight(), Status: LeaseActive, Amount: tx.Amount, RecipientAddr: *tx.Recipient.Address(), @@ -254,11 +254,13 @@ func TestPerformLeaseWithProofs(t *testing.T) { to.stor.addBlock(t, blockID0) tx := createLeaseWithProofs(t) - _, err := to.tp.performLeaseWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + pi := defaultPerformerInfo() + _, err := to.tp.performLeaseWithProofs(tx, pi, nil, nil) assert.NoError(t, err, "performLeaseWithProofs() failed") to.stor.flush(t) leasingInfo := &leasing{ OriginTransactionID: tx.ID, + OriginHeight: pi.blockHeight(), Status: LeaseActive, Amount: tx.Amount, RecipientAddr: *tx.Recipient.Address(), @@ -276,19 +278,22 @@ func TestPerformLeaseCancelWithSig(t *testing.T) { to.stor.addBlock(t, blockID0) leaseTx := createLeaseWithSig(t) - _, err := to.tp.performLeaseWithSig(leaseTx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + pi := 
defaultPerformerInfo() + _, err := to.tp.performLeaseWithSig(leaseTx, pi, nil, nil) assert.NoError(t, err, "performLeaseWithSig() failed") to.stor.flush(t) tx := createLeaseCancelWithSig(t, *leaseTx.ID) leasingInfo := &leasing{ OriginTransactionID: leaseTx.ID, + OriginHeight: pi.blockHeight(), Status: LeaseCancelled, Amount: leaseTx.Amount, RecipientAddr: *leaseTx.Recipient.Address(), SenderPK: testGlobal.senderInfo.pk, CancelTransactionID: tx.ID, + CancelHeight: pi.blockHeight(), } - _, err = to.tp.performLeaseCancelWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performLeaseCancelWithSig(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performLeaseCancelWithSig() failed") to.stor.flush(t) info, err := to.stor.entities.leases.leasingInfo(*leaseTx.ID) @@ -302,19 +307,22 @@ func TestPerformLeaseCancelWithProofs(t *testing.T) { to.stor.addBlock(t, blockID0) leaseTx := createLeaseWithProofs(t) - _, err := to.tp.performLeaseWithProofs(leaseTx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + pi := defaultPerformerInfo() + _, err := to.tp.performLeaseWithProofs(leaseTx, pi, nil, nil) assert.NoError(t, err, "performLeaseWithProofs() failed") to.stor.flush(t) tx := createLeaseCancelWithProofs(t, *leaseTx.ID) leasingInfo := &leasing{ OriginTransactionID: leaseTx.ID, + OriginHeight: pi.blockHeight(), Status: LeaseCancelled, Amount: leaseTx.Amount, RecipientAddr: *leaseTx.Recipient.Address(), SenderPK: testGlobal.senderInfo.pk, CancelTransactionID: tx.ID, + CancelHeight: pi.blockHeight(), } - _, err = to.tp.performLeaseCancelWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performLeaseCancelWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performLeaseCancelWithProofs() failed") to.stor.flush(t) info, err := to.stor.entities.leases.leasingInfo(*leaseTx.ID) @@ -328,7 +336,7 @@ func TestPerformCreateAliasWithSig(t *testing.T) { to.stor.addBlock(t, blockID0) tx := createCreateAliasWithSig(t) - _, err := to.tp.performCreateAliasWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performCreateAliasWithSig(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performCreateAliasWithSig() failed") to.stor.flush(t) addr, err := to.stor.entities.aliases.addrByAlias(tx.Alias.Alias) @@ -336,7 +344,7 @@ func TestPerformCreateAliasWithSig(t *testing.T) { assert.Equal(t, testGlobal.senderInfo.addr, addr, "invalid address by alias after performing CreateAliasWithSig transaction") // Test stealing aliases. 
- _, err = to.tp.performCreateAliasWithSig(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performCreateAliasWithSig(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performCreateAliasWithSig() failed") to.stor.flush(t) err = to.stor.entities.aliases.disableStolenAliases(blockID0) @@ -352,7 +360,7 @@ func TestPerformCreateAliasWithProofs(t *testing.T) { to.stor.addBlock(t, blockID0) tx := createCreateAliasWithProofs(t) - _, err := to.tp.performCreateAliasWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performCreateAliasWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performCreateAliasWithProofs() failed") to.stor.flush(t) addr, err := to.stor.entities.aliases.addrByAlias(tx.Alias.Alias) @@ -360,7 +368,7 @@ func TestPerformCreateAliasWithProofs(t *testing.T) { assert.Equal(t, testGlobal.senderInfo.addr, addr, "invalid address by alias after performing CreateAliasWithProofs transaction") // Test stealing aliases. - _, err = to.tp.performCreateAliasWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err = to.tp.performCreateAliasWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performCreateAliasWithProofs() failed") to.stor.flush(t) err = to.stor.entities.aliases.disableStolenAliases(blockID0) @@ -380,7 +388,7 @@ func TestPerformDataWithProofs(t *testing.T) { entry := &proto.IntegerDataEntry{Key: "TheKey", Value: int64(666)} tx.Entries = []proto.DataEntry{entry} - _, err := to.tp.performDataWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performDataWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performDataWithProofs() failed") to.stor.flush(t) @@ -396,7 +404,7 @@ func TestPerformSponsorshipWithProofs(t *testing.T) { to.stor.addBlock(t, blockID0) tx := createSponsorshipWithProofs(t, 1000) - _, err := to.tp.performSponsorshipWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performSponsorshipWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performSponsorshipWithProofs() failed") assetID := proto.AssetIDFromDigest(tx.AssetID) @@ -457,7 +465,7 @@ func TestPerformSetScriptWithProofs(t *testing.T) { require.NoError(t, err) tx := createSetScriptWithProofs(t, scriptBytes) - pi := *defaultPerformerInfo(to.stateActionsCounter) + pi := *defaultPerformerInfo() pi.checkerData.scriptEstimation = &scriptEstimation{} _, err = to.tp.performSetScriptWithProofs(tx, &pi, nil, nil) @@ -530,7 +538,7 @@ func TestPerformSetAssetScriptWithProofs(t *testing.T) { to.stor.addBlock(t, blockID0) tx := createSetAssetScriptWithProofs(t) - pi := *defaultPerformerInfo(to.stateActionsCounter) + pi := *defaultPerformerInfo() currentEstimatorVersion := 4 tree, err := serialization.Parse(tx.Script) @@ -625,12 +633,12 @@ func TestPerformUpdateAssetInfoWithProofs(t *testing.T) { assetInfo := to.stor.createAsset(t, testGlobal.asset0.asset.ID) tx := createUpdateAssetInfoWithProofs(t) - _, err := to.tp.performUpdateAssetInfoWithProofs(tx, defaultPerformerInfo(to.stateActionsCounter), nil, nil) + _, err := to.tp.performUpdateAssetInfoWithProofs(tx, defaultPerformerInfo(), nil, nil) assert.NoError(t, err, "performUpdateAssetInfoWithProofs() failed") to.stor.flush(t) assetInfo.name = tx.Name assetInfo.description = tx.Description - assetInfo.lastNameDescChangeHeight = checkerInfo.height + 1 + assetInfo.lastNameDescChangeHeight = checkerInfo.blockchainHeight 
+ 1 // Check asset info. info, err := to.stor.entities.assets.assetInfo(proto.AssetIDFromDigest(tx.AssetID)) From 398e237984740cc7855d38eb868ce60aae467348 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Dec 2023 23:19:19 +0300 Subject: [PATCH 6/8] Bump google.golang.org/grpc from 1.60.0 to 1.60.1 (#1283) Bumps [google.golang.org/grpc](https://github.com/grpc/grpc-go) from 1.60.0 to 1.60.1. - [Release notes](https://github.com/grpc/grpc-go/releases) - [Commits](https://github.com/grpc/grpc-go/compare/v1.60.0...v1.60.1) --- updated-dependencies: - dependency-name: google.golang.org/grpc dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index e914aa0cf..5ade204e8 100644 --- a/go.mod +++ b/go.mod @@ -48,7 +48,7 @@ require ( golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 golang.org/x/sync v0.5.0 golang.org/x/sys v0.15.0 - google.golang.org/grpc v1.60.0 + google.golang.org/grpc v1.60.1 google.golang.org/protobuf v1.31.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 moul.io/zapfilter v1.7.0 diff --git a/go.sum b/go.sum index 7848d35b8..c0c979355 100644 --- a/go.sum +++ b/go.sum @@ -472,8 +472,8 @@ golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f h1:ultW7fxlIvee4HYrtnaRPon9HpEgFk5zYpmfMgtKB5I= google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f/go.mod h1:L9KNLi232K1/xB6f7AlSX692koaRnKaWSR0stBki0Yc= -google.golang.org/grpc v1.60.0 h1:6FQAR0kM31P6MRdeluor2w2gPaS4SVNrD/DNTxrQ15k= -google.golang.org/grpc v1.60.0/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= +google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU= +google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= From 33ce8b4865996e656850b14e92af80d2a77d00c1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Dec 2023 20:54:37 +0000 Subject: [PATCH 7/8] Bump golang.org/x/crypto from 0.16.0 to 0.17.0 (#1282) --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 5ade204e8..1bd6d1d49 100644 --- a/go.mod +++ b/go.mod @@ -44,7 +44,7 @@ require ( github.com/xenolf/lego v2.7.2+incompatible go.uber.org/atomic v1.11.0 go.uber.org/zap v1.26.0 - golang.org/x/crypto v0.16.0 + golang.org/x/crypto v0.17.0 golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 golang.org/x/sync v0.5.0 golang.org/x/sys v0.15.0 diff --git a/go.sum b/go.sum index c0c979355..ef38708cd 100644 --- a/go.sum +++ b/go.sum @@ -348,8 +348,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I= -golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY= -golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 h1:m64FZMko/V45gv0bNmrNYoDEq8U5YUhetc9cBWKS1TQ= golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63/go.mod h1:0v4NqG35kSWCMzLaMeX+IQrlSnVE/bqGSyC2cz/9Le8= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= From 14214232a36faf6af04a61db1a880765749a0a7d Mon Sep 17 00:00:00 2001 From: Nikolay Eskov Date: Wed, 20 Dec 2023 19:59:32 +0300 Subject: [PATCH 8/8] Create 'ITests' workflow for intergration tests. (#1281) --- .github/workflows/go.yml | 32 ---------------------------- .github/workflows/itests.yml | 41 ++++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 32 deletions(-) create mode 100644 .github/workflows/itests.yml diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index c25679254..318b632c0 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -67,35 +67,3 @@ jobs: file: ./coverage.txt flags: unittests yml: ./codecov.yml - - itest: - name: integration_tests - runs-on: self-hosted - if: github.event_name == 'pull_request' && github.repository == 'wavesplatform/gowaves' - - steps: - - name: Set up Go 1.20 - uses: actions/setup-go@v5.0.0 - with: - go-version: 1.20.x - check-latest: true - cache: true - id: go - - - name: Check out code into the Go module directory - uses: actions/checkout@v4 - - - name: Get dependencies - run: go mod vendor - - - name: Tests - run: make itest - - - name: Upload itest logs - uses: actions/upload-artifact@v4 - if: failure() - with: - name: itest_logs - path: build/logs/ - if-no-files-found: warn - retention-days: 5 diff --git a/.github/workflows/itests.yml b/.github/workflows/itests.yml new file mode 100644 index 000000000..0fc935f08 --- /dev/null +++ b/.github/workflows/itests.yml @@ -0,0 +1,41 @@ +name: "ITests" +on: + workflow_dispatch: + pull_request_review: + types: [ submitted ] + branches: [ master ] + +jobs: + itest: + name: integration_tests + runs-on: self-hosted + environment: + name: ITests + if: (github.event_name == 'workflow_dispatch' || github.event.review.state == 'APPROVED') && github.repository == 'wavesplatform/gowaves' + + steps: + - name: Set up Go 1.20 + uses: actions/setup-go@v5.0.0 + with: + go-version: 1.20.x + check-latest: true + cache: true + id: go + + - name: Check out code into the Go module directory + uses: actions/checkout@v4 + + - name: Get dependencies + run: go mod vendor + + - name: Tests + run: make itest + + - name: Upload itest logs + uses: actions/upload-artifact@v4 + if: failure() + with: + name: itest_logs + path: build/logs/ + if-no-files-found: warn + retention-days: 5
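
A minimal, standalone sketch (not part of the patches above) of the height convention the series adopts in pkg/state: performerInfo now stores the blockchain height (the height of the last applied block), and the new blockHeight() helper derives the height of the block currently being applied as one above it. The struct below mirrors only the height-related part of the real performerInfo in pkg/state/transaction_performer.go, and uint64 stands in for proto.Height; everything else is omitted for illustration.

    package main

    import "fmt"

    // performerInfo mirrors only the height-related field of the real struct;
    // all other fields (blockID, miner address, checker data) are omitted.
    type performerInfo struct {
        blockchainHeight uint64 // height of the last applied block
    }

    // blockHeight returns the height of the block currently being applied,
    // which is one above the current blockchain height.
    func (i *performerInfo) blockHeight() uint64 { return i.blockchainHeight + 1 }

    func main() {
        pi := performerInfo{blockchainHeight: 100500}
        fmt.Println(pi.blockchainHeight, pi.blockHeight()) // 100500 100501
    }

This is the distinction the renames above encode: checkerInfo.height became blockchainHeight, and places that previously computed info.height + 1 by hand (issue, lease, lease-cancel, asset-info updates) now call the helper, so "current block height" is derived in one place instead of being re-added at each call site.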