From 7150fc2ea5ae9ffb82ffd5615e5474d364164aa2 Mon Sep 17 00:00:00 2001 From: xiaoguang Date: Wed, 11 Dec 2024 15:43:30 +0800 Subject: [PATCH] feat: add tonlib-core code --- Cargo.lock | 26 +- Cargo.toml | 1 + token-core/tcx-libs/tonlib-core/Cargo.lock | 336 +++++++++ token-core/tcx-libs/tonlib-core/Cargo.toml | 24 + token-core/tcx-libs/tonlib-core/README.md | 94 +++ .../resources/wallet/highload_v1r1.code | 1 + .../resources/wallet/highload_v1r2.code | 1 + .../resources/wallet/highload_v2.code | 1 + .../resources/wallet/highload_v2r1.code | 1 + .../resources/wallet/highload_v2r2.code | 1 + .../resources/wallet/wallet_v1r1.code | 1 + .../resources/wallet/wallet_v1r2.code | 1 + .../resources/wallet/wallet_v1r3.code | 1 + .../resources/wallet/wallet_v2r1.code | 1 + .../resources/wallet/wallet_v2r2.code | 1 + .../resources/wallet/wallet_v3r1.code | 1 + .../resources/wallet/wallet_v3r2.code | 1 + .../resources/wallet/wallet_v4r1.code | 1 + .../resources/wallet/wallet_v4r2.code | 1 + .../resources/wallet/wallet_v5.code | 1 + token-core/tcx-libs/tonlib-core/src/cell.rs | 564 ++++++++++++++ .../tonlib-core/src/cell/bag_of_cells.rs | 107 +++ .../tcx-libs/tonlib-core/src/cell/builder.rs | 712 ++++++++++++++++++ .../tonlib-core/src/cell/cell_type.rs | 381 ++++++++++ .../tcx-libs/tonlib-core/src/cell/dict.rs | 13 + .../tonlib-core/src/cell/dict/builder.rs | 197 +++++ .../src/cell/dict/leading_bit_utils.rs | 84 +++ .../tonlib-core/src/cell/dict/parser.rs | 125 +++ .../src/cell/dict/predefined_readers.rs | 99 +++ .../src/cell/dict/predefined_writers.rs | 35 + .../tonlib-core/src/cell/dict/tests.rs | 232 ++++++ .../tonlib-core/src/cell/dict/types.rs | 18 + .../tcx-libs/tonlib-core/src/cell/error.rs | 115 +++ .../tonlib-core/src/cell/level_mask.rs | 48 ++ .../tcx-libs/tonlib-core/src/cell/parser.rs | 638 ++++++++++++++++ .../tcx-libs/tonlib-core/src/cell/raw.rs | 341 +++++++++ .../tonlib-core/src/cell/raw_boc_from_boc.rs | 153 ++++ .../tcx-libs/tonlib-core/src/cell/slice.rs | 107 
+++ .../tonlib-core/src/cell/state_init.rs | 129 ++++ .../tcx-libs/tonlib-core/src/cell/util.rs | 34 + token-core/tcx-libs/tonlib-core/src/lib.rs | 10 + token-core/tcx-libs/tonlib-core/src/types.rs | 14 + .../tcx-libs/tonlib-core/src/types/address.rs | 486 ++++++++++++ .../tcx-libs/tonlib-core/src/types/error.rs | 17 + 44 files changed, 5152 insertions(+), 3 deletions(-) create mode 100644 token-core/tcx-libs/tonlib-core/Cargo.lock create mode 100644 token-core/tcx-libs/tonlib-core/Cargo.toml create mode 100644 token-core/tcx-libs/tonlib-core/README.md create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/highload_v1r1.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/highload_v1r2.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2r1.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2r2.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r1.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r2.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r3.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v2r1.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v2r2.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v3r1.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v3r2.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v4r1.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v4r2.code create mode 100644 token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v5.code create mode 100644 token-core/tcx-libs/tonlib-core/src/cell.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/bag_of_cells.rs create mode 100644 
token-core/tcx-libs/tonlib-core/src/cell/builder.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/cell_type.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/dict.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/dict/builder.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/dict/leading_bit_utils.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/dict/parser.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/dict/predefined_readers.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/dict/predefined_writers.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/dict/tests.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/dict/types.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/error.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/level_mask.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/parser.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/raw.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/raw_boc_from_boc.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/slice.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/state_init.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/cell/util.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/lib.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/types.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/types/address.rs create mode 100644 token-core/tcx-libs/tonlib-core/src/types/error.rs diff --git a/Cargo.lock b/Cargo.lock index d6dd56b0..1d3565d1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -378,9 +378,9 @@ checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" [[package]] name = "bitstream-io" -version = "2.5.3" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b81e1519b0d82120d2fd469d5bfb2919a9361c48b02d82d04befc1cdd2002452" +checksum = "6099cdc01846bc367c4e7dd630dc5966dccf36b652fae7a74e17b640411a91b2" [[package]] name = "bitvec" @@ -5114,7 +5114,7 @@ dependencies = [ "tcx-crypto", "tcx-keystore", "tcx-primitive", - "tonlib-core", + "tonlib-core 0.19.1", ] [[package]] @@ -5416,6 +5416,26 @@ dependencies = [ "thiserror", ] +[[package]] +name = "tonlib-core" +version = "0.21.1" +dependencies = [ + "base64 0.22.1", + "bitstream-io", + "crc", + "hex", + "hmac 0.12.1", + "lazy_static", + "nacl", + "num-bigint 0.4.3", + "num-traits", + "pbkdf2 0.11.0", + "serde", + "serde_json", + "sha2 0.10.8", + "thiserror", +] + [[package]] name = "tower-service" version = "0.3.2" diff --git a/Cargo.toml b/Cargo.toml index ec33b92a..dfc1439a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,6 +22,7 @@ members = [ "token-core/tcx-common", "token-core/tcx-migration", "token-core/tcx-libs/ed25519-dalek-bip32", + "token-core/tcx-libs/tonlib-core", "imkey-core/ikc", "imkey-core/ikc-common", "imkey-core/ikc-device", diff --git a/token-core/tcx-libs/tonlib-core/Cargo.lock b/token-core/tcx-libs/tonlib-core/Cargo.lock new file mode 100644 index 00000000..253a5623 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/Cargo.lock @@ -0,0 +1,336 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bitstream-io" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6099cdc01846bc367c4e7dd630dc5966dccf36b652fae7a74e17b640411a91b2" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cpufeatures" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "itoa" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.168" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" + +[[package]] +name = "nacl" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30aefc44d813c51b5e7952950e87c17f2e0e1a3274d63c8281a701e05323d548" + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "password-hash" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" +dependencies = [ + "base64ct", + "rand_core", + "subtle", +] + +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest", + "hmac", + "password-hash", + "sha2", +] + +[[package]] +name = "proc-macro2" +version = "1.0.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" + +[[package]] +name = "ryu" +version = "1.0.18" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "serde" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "thiserror" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tonlib-core" +version = "0.21.1" +dependencies = [ + "base64", + "bitstream-io", + "crc", + "hex", + "hmac", + "lazy_static", + "nacl", + "num-bigint", + "num-traits", + "pbkdf2", + "serde", + "serde_json", + "sha2", + "thiserror", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-ident" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" diff --git a/token-core/tcx-libs/tonlib-core/Cargo.toml b/token-core/tcx-libs/tonlib-core/Cargo.toml new file mode 100644 index 00000000..a91f0963 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "tonlib-core" +description = "Rust SDK for The Open Network" +version = "0.21.1" +edition = "2021" +license = "MIT" +repository = "https://github.com/ston-fi/tonlib-rs" +resolver = "2" + +[dependencies] +base64 = "0.22" +bitstream-io = "=2.6.0" +crc = "=3.2.1" +hex = "=0.4.3" +hmac = {version = "=0.12.1", features = ["std"]} +lazy_static = "=1.4.0" +nacl = "0.5.3" +num-bigint = { version = "=0.4.3", features = ["serde"] } +num-traits = "=0.2.19" +pbkdf2 = { version="=0.11.0", features = ["simple"] } +serde = { version = "=1.0.210", features = ["derive"] } +serde_json = "=1.0.89" +sha2 = "=0.10.8" +thiserror = "=1.0.56" \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/README.md b/token-core/tcx-libs/tonlib-core/README.md new file 
mode 100644 index 00000000..6febe97f --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/README.md @@ -0,0 +1,94 @@ +# Rust SDK for The Open Network + +Rust SDK for [The Open Network](https://ton.org/) + +## Features + +* Support parsing and generation of Cell and BagOfCell for more convenient interaction with data structures +* Support of existing Wallet versions +* Derive wallet address +* Support of TON Mnemonics +* NaCl-compatible Ed25519 signing of transactions + +## Usage + +To use this library in your Rust application, add the following to your Cargo.toml file: + +```toml +[dependencies] +tonlib-core = "version" +``` + +Then, in your Rust code, you can import the library with: + +```rust +use tonlib_core; +``` + +## Package contents + +### Cell + +Data structures and helpers for building and parsing Cell and Bag of Cells. See the documentation on [ton.org ](https://docs.ton.org/develop/data-formats/cell-boc)for details. + +### Message + +Data structures, builders, and parsers for Message +See the documentation on [ton.org ](https://docs.ton.org/develop/smart-contracts/messages)for details. + +Includes standard messages for Jetton, NFT, and Soulbound NFT, specified by [TON Enhancement Proposal](https://github.com/ton-blockchain/TEPs/blob/master/text/0001-tep-lifecycle.md). + +### Mnemonic + +Data structure to store mnemonic. + +### Types + +Data structures for storage and easy conversion of [Ton Smart-contract Address](https://docs.ton.org/learn/overviews/addresses) and [Ton Transaction Id](https://docs.ton.org/develop/data-formats/transaction-layout#transaction) + + +### Wallet + +Data structure for deriving wallet addresses. 
+ +## Usage examples + +### Cell + +Creating a `Cell` and writing data to it: + +``` rust +use anyhow::anyhow; +use tonlib_core::TonAddress; +use tonlib_core::cell::CellBuilder; + +fn write_cell() -> anyhow::Result<()> { +let mut writer = CellBuilder::new(); +let addr = TonAddress::from_base64_url("EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjrR")?; +let cell = writer + .store_u32(32, 0xFAD45AADu32)? + .store_bit(true)? + .store_u8(8, 234u8)? + .store_slice(&[0xFA, 0xD4, 0x5A, 0xAD, 0xAA, 0x12, 0xFF, 0x45])? + .store_address(&addr)? + .store_string("Hello, TON")? + .build()?; + # Ok(()) +} +``` + + Reading data from a `Cell`: + +```rust +use tonlib_core::cell::Cell; +fn read_cell(cell: Cell) -> anyhow::Result<()> { + let mut reader = cell.parser(); + let u32_value = reader.load_u32(32)?; + let bit_value = reader.load_bit()?; + let u8_value = reader.load_u8(8)?; + let bytes_value = reader.load_bytes(8)?; + let address_value = reader.load_address()?; + let str_value = reader.ensure_empty()?; + Ok(()) +} +``` \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v1r1.code b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v1r1.code new file mode 100644 index 00000000..70ae403f --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v1r1.code @@ -0,0 +1 @@ +te6ccgEBBgEAhgABFP8A9KQT9KDyyAsBAgEgAgMCAUgEBQC88oMI1xgg0x/TH9Mf+CMTu/Jj7UTQ0x/TH9P/0VEyuvKhUUS68qIE+QFUEFX5EPKj9ATR+AB/jhghgBD0eG+hb6EgmALTB9QwAfsAkTLiAbPmWwGkyMsfyx/L/8ntVAAE0DAAEaCZL9qJoa4WPw== \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v1r2.code b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v1r2.code new file mode 100644 index 00000000..a98d0838 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v1r2.code @@ -0,0 +1 @@ 
+te6ccgEBCAEAmQABFP8A9KQT9LzyyAsBAgEgAgMCAUgEBQC88oMI1xgg0x/TH9Mf+CMTu/Jj7UTQ0x/TH9P/0VEyuvKhUUS68qIE+QFUEFX5EPKj9ATR+AB/jhghgBD0eG+hb6EgmALTB9QwAfsAkTLiAbPmWwGkyMsfyx/L/8ntVAAE0DACAUgGBwAXuznO1E0NM/MdcL/4ABG4yX7UTQ1wsfg= \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2.code b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2.code new file mode 100644 index 00000000..865330db --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2.code @@ -0,0 +1 @@ +te6ccgEBCQEA5QABFP8A9KQT9LzyyAsBAgEgAgcCAUgDBAAE0DACASAFBgAXvZznaiaGmvmOuF/8AEG+X5dqJoaY+Y6Z/p/5j6AmipEEAgegc30JjJLb/JXdHxQB6vKDCNcYINMf0z/4I6ofUyC58mPtRNDTH9M/0//0BNFTYIBA9A5voTHyYFFzuvKiB/kBVBCH+RDyowL0BNH4AH+OFiGAEPR4b6UgmALTB9QwAfsAkTLiAbPmW4MlochANIBA9EOK5jEByMsfE8s/y//0AMntVAgANCCAQPSWb6VsEiCUMFMDud4gkzM2AZJsIeKz \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2r1.code b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2r1.code new file mode 100644 index 00000000..8e015795 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2r1.code @@ -0,0 +1 @@ +te6ccgEBBwEA1gABFP8A9KQT9KDyyAsBAgEgAgMCAUgEBQHu8oMI1xgg0x/TP/gjqh9TILnyY+1E0NMf0z/T//QE0VNggED0Dm+hMfJgUXO68qIH+QFUEIf5EPKjAvQE0fgAf44YIYAQ9HhvoW+hIJgC0wfUMAH7AJEy4gGz5luDJaHIQDSAQPRDiuYxyBLLHxPLP8v/9ADJ7VQGAATQMABBoZfl2omhpj5jpn+n/mPoCaKkQQCB6BzfQmMktv8ld0fFADgggED0lm+hb6EyURCUMFMDud4gkzM2AZIyMOKz \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2r2.code b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2r2.code new file mode 100644 index 00000000..bde5df02 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/highload_v2r2.code @@ -0,0 +1 @@ 
+te6ccgEBCQEA6QABFP8A9KQT9LzyyAsBAgEgAgMCAUgEBQHu8oMI1xgg0x/TP/gjqh9TILnyY+1E0NMf0z/T//QE0VNggED0Dm+hMfJgUXO68qIH+QFUEIf5EPKjAvQE0fgAf44YIYAQ9HhvoW+hIJgC0wfUMAH7AJEy4gGz5luDJaHIQDSAQPRDiuYxyBLLHxPLP8v/9ADJ7VQIAATQMAIBIAYHABe9nOdqJoaa+Y64X/wAQb5fl2omhpj5jpn+n/mPoCaKkQQCB6BzfQmMktv8ld0fFAA4IIBA9JZvoW+hMlEQlDBTA7neIJMzNgGSMjDisw== \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r1.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r1.code new file mode 100644 index 00000000..6dc38b0c --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r1.code @@ -0,0 +1 @@ +te6cckEBAQEARAAAhP8AIN2k8mCBAgDXGCDXCx/tRNDTH9P/0VESuvKhIvkBVBBE+RDyovgAAdMfMSDXSpbTB9QC+wDe0aTIyx/L/8ntVEH98Ik= \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r2.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r2.code new file mode 100644 index 00000000..02b98962 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r2.code @@ -0,0 +1 @@ +te6cckEBAQEAUwAAov8AIN0gggFMl7qXMO1E0NcLH+Ck8mCBAgDXGCDXCx/tRNDTH9P/0VESuvKhIvkBVBBE+RDyovgAAdMfMSDXSpbTB9QC+wDe0aTIyx/L/8ntVNDieG8= \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r3.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r3.code new file mode 100644 index 00000000..1c67c711 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v1r3.code @@ -0,0 +1 @@ +te6cckEBAQEAXwAAuv8AIN0gggFMl7ohggEznLqxnHGw7UTQ0x/XC//jBOCk8mCBAgDXGCDXCx/tRNDTH9P/0VESuvKhIvkBVBBE+RDyovgAAdMfMSDXSpbTB9QC+wDe0aTIyx/L/8ntVLW4bkI= \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v2r1.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v2r1.code new file mode 100644 index 00000000..6cbf1206 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v2r1.code 
@@ -0,0 +1 @@ +te6cckEBAQEAVwAAqv8AIN0gggFMl7qXMO1E0NcLH+Ck8mCDCNcYINMf0x8B+CO78mPtRNDTH9P/0VExuvKhA/kBVBBC+RDyovgAApMg10qW0wfUAvsA6NGkyMsfy//J7VShNwu2 \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v2r2.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v2r2.code new file mode 100644 index 00000000..81d52d11 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v2r2.code @@ -0,0 +1 @@ +te6cckEBAQEAYwAAwv8AIN0gggFMl7ohggEznLqxnHGw7UTQ0x/XC//jBOCk8mCDCNcYINMf0x8B+CO78mPtRNDTH9P/0VExuvKhA/kBVBBC+RDyovgAApMg10qW0wfUAvsA6NGkyMsfy//J7VQETNeh \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v3r1.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v3r1.code new file mode 100644 index 00000000..b4073e47 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v3r1.code @@ -0,0 +1 @@ +te6cckEBAQEAYgAAwP8AIN0gggFMl7qXMO1E0NcLH+Ck8mCDCNcYINMf0x/TH/gjE7vyY+1E0NMf0x/T/9FRMrryoVFEuvKiBPkBVBBV+RDyo/gAkyDXSpbTB9QC+wDo0QGkyMsfyx/L/8ntVD++buA= \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v3r2.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v3r2.code new file mode 100644 index 00000000..18dbaf76 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v3r2.code @@ -0,0 +1 @@ +te6cckEBAQEAcQAA3v8AIN0gggFMl7ohggEznLqxn3Gw7UTQ0x/THzHXC//jBOCk8mCDCNcYINMf0x/TH/gjE7vyY+1E0NMf0x/T/9FRMrryoVFEuvKiBPkBVBBV+RDyo/gAkyDXSpbTB9QC+wDo0QGkyMsfyx/L/8ntVBC9ba0= \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v4r1.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v4r1.code new file mode 100644 index 00000000..c27d4e89 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v4r1.code @@ -0,0 +1 @@ 
+te6cckECFQEAAvUAART/APSkE/S88sgLAQIBIAIDAgFIBAUE+PKDCNcYINMf0x/THwL4I7vyY+1E0NMf0x/T//QE0VFDuvKhUVG68qIF+QFUEGT5EPKj+AAkpMjLH1JAyx9SMMv/UhD0AMntVPgPAdMHIcAAn2xRkyDXSpbTB9QC+wDoMOAhwAHjACHAAuMAAcADkTDjDQOkyMsfEssfy/8REhMUA+7QAdDTAwFxsJFb4CHXScEgkVvgAdMfIYIQcGx1Z70ighBibG5jvbAighBkc3RyvbCSXwPgAvpAMCD6RAHIygfL/8nQ7UTQgQFA1yH0BDBcgQEI9ApvoTGzkl8F4ATTP8glghBwbHVnupEx4w0kghBibG5juuMABAYHCAIBIAkKAFAB+gD0BDCCEHBsdWeDHrFwgBhQBcsFJ88WUAP6AvQAEstpyx9SEMs/AFL4J28ighBibG5jgx6xcIAYUAXLBSfPFiT6AhTLahPLH1Iwyz8B+gL0AACSghBkc3Ryuo41BIEBCPRZMO1E0IEBQNcgyAHPFvQAye1UghBkc3Rygx6xcIAYUATLBVjPFiL6AhLLassfyz+UEDRfBOLJgED7AAIBIAsMAFm9JCtvaiaECAoGuQ+gIYRw1AgIR6STfSmRDOaQPp/5g3gSgBt4EBSJhxWfMYQCAVgNDgARuMl+1E0NcLH4AD2ynftRNCBAUDXIfQEMALIygfL/8nQAYEBCPQKb6ExgAgEgDxAAGa3OdqJoQCBrkOuF/8AAGa8d9qJoQBBrkOuFj8AAbtIH+gDU1CL5AAXIygcVy//J0Hd0gBjIywXLAiLPFlAF+gIUy2sSzMzJcfsAyEAUgQEI9FHypwIAbIEBCNcYyFQgJYEBCPRR8qeCEG5vdGVwdIAYyMsFywJQBM8WghAF9eEA+gITy2oSyx/JcfsAAgBygQEI1xgwUgKBAQj0WfKn+CWCEGRzdHJwdIAYyMsFywJQBc8WghAF9eEA+gIUy2oTyx8Syz/Jc/sAAAr0AMntVEap808= \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v4r2.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v4r2.code new file mode 100644 index 00000000..e1d04cde --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v4r2.code @@ -0,0 +1 @@ 
+te6cckECFAEAAtQAART/APSkE/S88sgLAQIBIAIDAgFIBAUE+PKDCNcYINMf0x/THwL4I7vyZO1E0NMf0x/T//QE0VFDuvKhUVG68qIF+QFUEGT5EPKj+AAkpMjLH1JAyx9SMMv/UhD0AMntVPgPAdMHIcAAn2xRkyDXSpbTB9QC+wDoMOAhwAHjACHAAuMAAcADkTDjDQOkyMsfEssfy/8QERITAubQAdDTAyFxsJJfBOAi10nBIJJfBOAC0x8hghBwbHVnvSKCEGRzdHK9sJJfBeAD+kAwIPpEAcjKB8v/ydDtRNCBAUDXIfQEMFyBAQj0Cm+hMbOSXwfgBdM/yCWCEHBsdWe6kjgw4w0DghBkc3RyupJfBuMNBgcCASAICQB4AfoA9AQw+CdvIjBQCqEhvvLgUIIQcGx1Z4MesXCAGFAEywUmzxZY+gIZ9ADLaRfLH1Jgyz8gyYBA+wAGAIpQBIEBCPRZMO1E0IEBQNcgyAHPFvQAye1UAXKwjiOCEGRzdHKDHrFwgBhQBcsFUAPPFiP6AhPLassfyz/JgED7AJJfA+ICASAKCwBZvSQrb2omhAgKBrkPoCGEcNQICEekk30pkQzmkD6f+YN4EoAbeBAUiYcVnzGEAgFYDA0AEbjJftRNDXCx+AA9sp37UTQgQFA1yH0BDACyMoHy//J0AGBAQj0Cm+hMYAIBIA4PABmtznaiaEAga5Drhf/AABmvHfaiaEAQa5DrhY/AAG7SB/oA1NQi+QAFyMoHFcv/ydB3dIAYyMsFywIizxZQBfoCFMtrEszMyXP7AMhAFIEBCPRR8qcCAHCBAQjXGPoA0z/IVCBHgQEI9FHyp4IQbm90ZXB0gBjIywXLAlAGzxZQBPoCFMtqEssfyz/Jc/sAAgBsgQEI1xj6ANM/MFIkgQEI9Fnyp4IQZHN0cnB0gBjIywXLAlAFzxZQA/oCE8tqyx8Syz/Jc/sAAAr0AMntVGliJeU= \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v5.code b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v5.code new file mode 100644 index 00000000..fe173493 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/resources/wallet/wallet_v5.code @@ -0,0 +1 @@ 
+te6ccgECFAEAAoEAART/APSkE/S88sgLAQIBIAIDAgFIBAUBAvIOAtzQINdJwSCRW49jINcLHyCCEGV4dG69IYIQc2ludL2wkl8D4IIQZXh0brqOtIAg1yEB0HTXIfpAMPpE+Cj6RDBYvZFb4O1E0IEBQdch9AWDB/QOb6ExkTDhgEDXIXB/2zzgMSDXSYECgLmRMOBw4hAPAgEgBgcCASAICQAZvl8PaiaECAoOuQ+gLAIBbgoLAgFIDA0AGa3OdqJoQCDrkOuF/8AAGa8d9qJoQBDrkOuFj8AAF7Ml+1E0HHXIdcLH4AARsmL7UTQ1woAgAR4g1wsfghBzaWduuvLgin8PAeaO8O2i7fshgwjXIgKDCNcjIIAg1yHTH9Mf0x/tRNDSANMfINMf0//XCgAK+QFAzPkQmiiUXwrbMeHywIffArNQB7Dy0IRRJbry4IVQNrry4Ib4I7vy0IgikvgA3gGkf8jKAMsfAc8Wye1UIJL4D95w2zzYEAP27aLt+wL0BCFukmwhjkwCIdc5MHCUIccAs44tAdcoIHYeQ2wg10nACPLgkyDXSsAC8uCTINcdBscSwgBSMLDy0InXTNc5MAGk6GwShAe78uCT10rAAPLgk+1V4tIAAcAAkVvg69csCBQgkXCWAdcsCBwS4lIQseMPINdKERITAJYB+kAB+kT4KPpEMFi68uCR7UTQgQFB1xj0BQSdf8jKAEAEgwf0U/Lgi44UA4MH9Fvy4Iwi1woAIW4Bs7Dy0JDiyFADzxYS9ADJ7VQAcjDXLAgkji0h8uCS0gDtRNDSAFETuvLQj1RQMJExnAGBAUDXIdcKAPLgjuLIygBYzxbJ7VST8sCN4gAQk1vbMeHXTNA= \ No newline at end of file diff --git a/token-core/tcx-libs/tonlib-core/src/cell.rs b/token-core/tcx-libs/tonlib-core/src/cell.rs new file mode 100644 index 00000000..3e2f8d61 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell.rs @@ -0,0 +1,564 @@ +use std::fmt::{Debug, Formatter}; +use std::hash::Hash; +use std::ops::Deref; +use std::sync::Arc; +use std::{fmt, io}; + +pub use bag_of_cells::*; +use base64::engine::general_purpose::URL_SAFE_NO_PAD; +use base64::Engine; +use bitstream_io::{BigEndian, BitWrite, BitWriter}; +pub use builder::*; +pub use error::*; +use hmac::digest::Digest; +use lazy_static::lazy_static; +pub use parser::*; +pub use raw::*; +use sha2::Sha256; +pub use slice::*; +pub use state_init::*; +pub use util::*; + +use crate::cell::cell_type::CellType; +use crate::cell::level_mask::LevelMask; +use crate::types::DEFAULT_CELL_HASH; +use crate::TonHash; + +mod bag_of_cells; +mod builder; + +mod cell_type; +pub mod dict; +mod error; +mod level_mask; +mod parser; +mod raw; +mod raw_boc_from_boc; +mod slice; +mod state_init; +mod util; +const DEPTH_BYTES: usize = 2; +const MAX_LEVEL: 
u8 = 3; + +pub type ArcCell = Arc; + +lazy_static! { + pub static ref EMPTY_CELL: Cell = Cell::default(); + pub static ref EMPTY_ARC_CELL: ArcCell = Arc::new(Cell::default()); +} + +#[derive(PartialEq, Eq, Clone, Hash)] +pub struct Cell { + data: Vec, + bit_len: usize, + references: Vec, + cell_type: CellType, + level_mask: LevelMask, + hashes: [TonHash; 4], + depths: [u16; 4], +} + +impl Cell { + pub fn new( + data: Vec, + bit_len: usize, + references: Vec, + is_exotic: bool, + ) -> Result { + let cell_type = if is_exotic { + CellType::determine_exotic_cell_type(&data)? + } else { + CellType::Ordinary + }; + + cell_type.validate(&data, bit_len, &references)?; + let level_mask = cell_type.level_mask(&data, bit_len, &references)?; + let (hashes, depths) = + calculate_hashes_and_depths(cell_type, &data, bit_len, &references, level_mask)?; + + let result = Self { + data, + bit_len, + references, + level_mask, + cell_type, + hashes, + depths, + }; + + Ok(result) + } + + pub fn parser(&self) -> CellParser { + CellParser::new(self.bit_len, &self.data, &self.references) + } + + #[allow(clippy::let_and_return)] + pub fn parse(&self, parse: F) -> Result + where + F: FnOnce(&mut CellParser) -> Result, + { + let mut parser = self.parser(); + let res = parse(&mut parser); + res + } + + pub fn parse_fully(&self, parse: F) -> Result + where + F: FnOnce(&mut CellParser) -> Result, + { + let mut reader = self.parser(); + let res = parse(&mut reader); + reader.ensure_empty()?; + res + } + + pub fn reference(&self, idx: usize) -> Result<&ArcCell, TonCellError> { + self.references.get(idx).ok_or(TonCellError::InvalidIndex { + idx, + ref_count: self.references.len(), + }) + } + + pub fn data(&self) -> &[u8] { + self.data.as_slice() + } + + pub fn bit_len(&self) -> usize { + self.bit_len + } + + pub fn references(&self) -> &[ArcCell] { + self.references.as_slice() + } + + pub(crate) fn get_level_mask(&self) -> u32 { + self.level_mask.mask() + } + + pub fn cell_depth(&self) -> u16 { + 
self.get_depth(MAX_LEVEL) + } + + pub fn get_depth(&self, level: u8) -> u16 { + self.depths[level.min(3) as usize] + } + + pub fn cell_hash(&self) -> TonHash { + self.get_hash(MAX_LEVEL) + } + + pub fn get_hash(&self, level: u8) -> TonHash { + self.hashes[level.min(3) as usize] + } + + pub fn is_exotic(&self) -> bool { + self.cell_type != CellType::Ordinary + } + + pub fn cell_hash_base64(&self) -> String { + URL_SAFE_NO_PAD.encode(self.cell_hash()) + } + + pub fn load_snake_formatted_string(&self) -> Result { + let mut cell: &Cell = self; + let mut first_cell = true; + let mut uri = String::new(); + loop { + let parsed_cell = if first_cell { + String::from_utf8_lossy(&cell.data[1..]).to_string() + } else { + String::from_utf8_lossy(&cell.data).to_string() + }; + uri.push_str(&parsed_cell); + match cell.references.len() { + 0 => return Ok(uri), + 1 => { + cell = cell.references[0].deref(); + first_cell = false; + } + n => { + return Err(TonCellError::boc_deserialization_error(format!( + "Invalid snake format string: found cell with {} references", + n + ))) + } + } + } + } + + fn parse_snake_data(&self, buffer: &mut Vec) -> Result<(), TonCellError> { + let mut cell = self; + let mut first_cell = true; + loop { + let mut parser = cell.parser(); + if first_cell { + let first_byte = parser.load_u8(8)?; + + if first_byte != 0 { + return Err(TonCellError::boc_deserialization_error( + "Invalid snake format", + )); + } + } + let remaining_bytes = parser.remaining_bytes(); + let mut data = parser.load_bytes(remaining_bytes)?; + buffer.append(&mut data); + match cell.references.len() { + 0 => return Ok(()), + 1 => { + cell = cell.references[0].deref(); + first_cell = false; + } + n => { + return Err(TonCellError::boc_deserialization_error(format!( + "Invalid snake format string: found cell with {} references", + n + ))) + } + } + } + } + + pub fn to_arc(self) -> ArcCell { + Arc::new(self) + } + + /// It is recommended to use CellParser::next_reference() instead + 
#[deprecated] + pub fn expect_reference_count(&self, expected_refs: usize) -> Result<(), TonCellError> { + let ref_count = self.references.len(); + if ref_count != expected_refs { + Err(TonCellError::CellParserError(format!( + "Cell should contain {} reference cells, actual: {}", + expected_refs, ref_count + ))) + } else { + Ok(()) + } + } +} + +impl Debug for Cell { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + let t = match self.cell_type { + CellType::Ordinary | CellType::Library => 'x', + CellType::PrunedBranch | CellType::MerkleProof => 'p', + CellType::MerkleUpdate => 'u', + }; + + // Our completion tag ONLY shows that the last byte is incomplete + // It does not correspond to real completion tag defined in + // p1.0.2 of https://docs.ton.org/tvm.pdf for details + // Null termination of bit-string defined in that document is omitted for clarity + let completion_tag = if self.bit_len % 8 != 0 { "_" } else { "" }; + writeln!( + f, + "Cell {}{{ data: [{}{}]\n, bit_len: {}\n, references: [", + t, + self.data + .iter() + .map(|&byte| format!("{:02X}", byte)) + .collect::>() + .join(""), + completion_tag, + self.bit_len, + )?; + + for reference in &self.references { + writeln!( + f, + " {}\n", + format!("{:?}", reference).replace('\n', "\n ") + )?; + } + + write!( + f, + "]\n cell_type: {:?}\n level_mask: {:?}\n hashes {:?}\n depths {:?}\n }}", + self.cell_type, + self.level_mask, + self.hashes + .iter() + .map(|h| h + .iter() + .map(|&byte| format!("{:02X}", byte)) + .collect::>() + .join("")) + .collect::>(), + self.depths + ) + } +} + +impl Default for Cell { + fn default() -> Self { + Self { + data: Default::default(), + bit_len: Default::default(), + references: Default::default(), + cell_type: Default::default(), + level_mask: Default::default(), + hashes: [DEFAULT_CELL_HASH; 4], + depths: Default::default(), + } + } +} + +fn get_repr_for_data( + original_data_bit_len: usize, + (data, data_bit_len): (&[u8], usize), + refs: &[ArcCell], + 
level_mask: LevelMask, + level: u8, + cell_type: CellType, +) -> Result, TonCellError> { + // Allocate + let data_len = data.len(); + // descriptors + data + (hash + depth) * refs_count + let buffer_len = 2 + data_len + (32 + 2) * refs.len(); + + let mut writer = BitWriter::endian(Vec::with_capacity(buffer_len), BigEndian); + let d1 = get_refs_descriptor(cell_type, refs, level_mask.apply(level).mask())?; + let d2 = get_bits_descriptor(original_data_bit_len)?; + + // Write descriptors + writer.write(8, d1).map_cell_parser_error()?; + writer.write(8, d2).map_cell_parser_error()?; + // Write main data + write_data(&mut writer, data, data_bit_len).map_cell_parser_error()?; + // Write ref data + write_ref_depths(&mut writer, refs, cell_type, level)?; + write_ref_hashes(&mut writer, refs, cell_type, level)?; + + let result = writer + .writer() + .ok_or_else(|| TonCellError::cell_builder_error("Stream for cell repr is not byte-aligned")) + .map(|b| b.to_vec()); + + result +} + +/// This function replicates unknown logic of resolving cell data +/// https://github.com/ton-blockchain/ton/blob/24dc184a2ea67f9c47042b4104bbb4d82289fac1/crypto/vm/cells/DataCell.cpp#L214 +fn calculate_hashes_and_depths( + cell_type: CellType, + data: &[u8], + bit_len: usize, + references: &[ArcCell], + level_mask: LevelMask, +) -> Result<([TonHash; 4], [u16; 4]), TonCellError> { + let hash_count = if cell_type == CellType::PrunedBranch { + 1 + } else { + level_mask.hash_count() + }; + + let total_hash_count = level_mask.hash_count(); + let hash_i_offset = total_hash_count - hash_count; + + let mut depths: Vec = Vec::with_capacity(hash_count); + let mut hashes: Vec = Vec::with_capacity(hash_count); + + // Iterate through significant levels + for (hash_i, level_i) in (0..=level_mask.level()) + .filter(|&i| level_mask.is_significant(i)) + .enumerate() + { + if hash_i < hash_i_offset { + continue; + } + + let (current_data, current_bit_len) = if hash_i == hash_i_offset { + (data, bit_len) + } else { 
+ let previous_hash = hashes + .get(hash_i - hash_i_offset - 1) + .ok_or_else(|| TonCellError::InternalError("Can't get right hash".to_owned()))?; + (previous_hash.as_slice(), 256) + }; + + // Calculate Depth + let depth = if references.is_empty() { + 0 + } else { + let max_ref_depth = references.iter().fold(0, |max_depth, reference| { + let child_depth = cell_type.child_depth(reference, level_i); + max_depth.max(child_depth) + }); + + max_ref_depth + 1 + }; + + // Calculate Hash + let repr = get_repr_for_data( + bit_len, + (current_data, current_bit_len), + references, + level_mask, + level_i, + cell_type, + )?; + let hash = Sha256::new_with_prefix(repr).finalize()[..] + .try_into() + .map_err(|error| { + TonCellError::InternalError(format!( + "Can't get [u8; 32] from finalized hash with error: {error}" + )) + })?; + + depths.push(depth); + hashes.push(hash); + } + + cell_type.resolve_hashes_and_depths(hashes, depths, data, bit_len, level_mask) +} + +/// Calculates d1 descriptor for cell +/// See https://docs.ton.org/tvm.pdf 3.1.4 for details +fn get_refs_descriptor( + cell_type: CellType, + references: &[ArcCell], + level_mask: u32, +) -> Result { + if references.len() > MAX_CELL_REFERENCES { + Err(TonCellError::InvalidCellData( + "Cell should not contain more than 4 references".to_string(), + )) + } else if level_mask > MAX_LEVEL_MASK { + Err(TonCellError::InvalidCellData( + "Cell level mask can not be higher than 3".to_string(), + )) + } else { + let cell_type_var = (cell_type != CellType::Ordinary) as u8; + let d1 = references.len() as u8 + 8 * cell_type_var + level_mask as u8 * 32; + Ok(d1) + } +} + +/// Calculates d2 descriptor for cell +/// See https://docs.ton.org/tvm.pdf 3.1.4 for details +fn get_bits_descriptor(bit_len: usize) -> Result { + if bit_len > MAX_CELL_BITS { + Err(TonCellError::InvalidCellData( + "Cell data length should not contain more than 1023 bits".to_string(), + )) + } else { + let d2 = (bit_len / 8 + (bit_len + 7) / 8) as u8; + Ok(d2) + 
} +} + +fn write_data( + writer: &mut BitWriter, BigEndian>, + data: &[u8], + bit_len: usize, +) -> Result<(), io::Error> { + let data_len = data.len(); + let rest_bits = bit_len % 8; + let full_bytes = rest_bits == 0; + + if !full_bytes { + writer.write_bytes(&data[..data_len - 1])?; + let last_byte = data[data_len - 1]; + let l = last_byte | 1 << (8 - rest_bits - 1); + writer.write(8, l)?; + } else { + writer.write_bytes(data)?; + } + + Ok(()) +} + +fn write_ref_depths( + writer: &mut BitWriter, BigEndian>, + refs: &[ArcCell], + parent_cell_type: CellType, + level: u8, +) -> Result<(), TonCellError> { + for reference in refs { + let child_depth = if matches!( + parent_cell_type, + CellType::MerkleProof | CellType::MerkleUpdate + ) { + reference.get_depth(level + 1) + } else { + reference.get_depth(level) + }; + + writer.write(8, child_depth / 256).map_cell_parser_error()?; + writer.write(8, child_depth % 256).map_cell_parser_error()?; + } + + Ok(()) +} + +fn write_ref_hashes( + writer: &mut BitWriter, BigEndian>, + refs: &[ArcCell], + parent_cell_type: CellType, + level: u8, +) -> Result<(), TonCellError> { + for reference in refs { + let child_hash = if matches!( + parent_cell_type, + CellType::MerkleProof | CellType::MerkleUpdate + ) { + reference.get_hash(level + 1) + } else { + reference.get_hash(level) + }; + + writer.write_bytes(&child_hash).map_cell_parser_error()?; + } + + Ok(()) +} + +#[cfg(test)] +mod test { + use std::sync::Arc; + + use super::cell_type::CellType; + use super::{get_bits_descriptor, get_refs_descriptor, Cell}; + use crate::cell::CellBuilder; + + #[test] + fn default_cell() { + let result = Cell::default(); + + let expected = Cell::new(vec![], 0, vec![], false).unwrap(); + + assert_eq!(result, expected) + } + + #[test] + fn d1_descriptor_test() { + let empty_cell = Arc::new(CellBuilder::new().build().unwrap()); + + let r1 = get_refs_descriptor(CellType::Ordinary, &[], 0).unwrap(); + assert_eq!(r1, 0); + + let r2 = 
get_refs_descriptor(CellType::Ordinary, &[], 4).is_err(); + assert!(r2); + + let r3 = get_refs_descriptor(CellType::Ordinary, &[empty_cell.clone()], 3).unwrap(); + assert_eq!(r3, 97); + + let r4 = + get_refs_descriptor(CellType::Ordinary, vec![empty_cell; 5].as_slice(), 3).is_err(); + assert!(r4); + } + + #[test] + fn d2_descriptor_test() { + let r1 = get_bits_descriptor(0).unwrap(); + assert_eq!(r1, 0); + + let r2 = get_bits_descriptor(1023).unwrap(); + assert_eq!(r2, 255); + + let r3 = get_bits_descriptor(1024).is_err(); + assert!(r3) + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/bag_of_cells.rs b/token-core/tcx-libs/tonlib-core/src/cell/bag_of_cells.rs new file mode 100644 index 00000000..bb9318c6 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/bag_of_cells.rs @@ -0,0 +1,107 @@ +use std::sync::Arc; + +use base64::engine::general_purpose::STANDARD; + +use crate::cell::raw_boc_from_boc::convert_to_raw_boc; +use crate::cell::*; + +#[derive(PartialEq, Eq, Debug, Clone, Hash)] +pub struct BagOfCells { + pub roots: Vec, +} + +impl BagOfCells { + pub fn new(roots: &[ArcCell]) -> BagOfCells { + BagOfCells { + roots: roots.to_vec(), + } + } + + pub fn from_root(root: Cell) -> BagOfCells { + let arc = Arc::new(root); + BagOfCells { roots: vec![arc] } + } + + pub fn add_root(&mut self, root: Cell) { + let arc = Arc::new(root); + self.roots.push(arc) + } + + pub fn num_roots(&self) -> usize { + self.roots.len() + } + + pub fn root(&self, idx: usize) -> Result<&ArcCell, TonCellError> { + self.roots.get(idx).ok_or_else(|| { + TonCellError::boc_deserialization_error(format!( + "Invalid root index: {}, BoC contains {} roots", + idx, + self.roots.len() + )) + }) + } + + pub fn single_root(&self) -> Result<&ArcCell, TonCellError> { + let root_count = self.roots.len(); + if root_count == 1 { + Ok(&self.roots[0]) + } else { + Err(TonCellError::CellParserError(format!( + "Single root expected, got {}", + root_count + ))) + } + } + + pub fn parse(serial: 
&[u8]) -> Result { + let raw = RawBagOfCells::parse(serial)?; + let num_cells = raw.cells.len(); + let mut cells: Vec = Vec::with_capacity(num_cells); + + for (cell_index, raw_cell) in raw.cells.into_iter().enumerate().rev() { + let mut references = Vec::with_capacity(raw_cell.references.len()); + for ref_index in &raw_cell.references { + if *ref_index <= cell_index { + return Err(TonCellError::boc_deserialization_error( + "References to previous cells are not supported", + )); + } + references.push(cells[num_cells - 1 - ref_index].clone()); + } + + let cell = Cell::new( + raw_cell.data, + raw_cell.bit_len, + references, + raw_cell.is_exotic, + ) + .map_boc_deserialization_error()?; + cells.push(cell.to_arc()); + } + + let roots = raw + .roots + .into_iter() + .map(|r| &cells[num_cells - 1 - r]) + .map(Arc::clone) + .collect(); + + Ok(BagOfCells { roots }) + } + + pub fn parse_hex(hex: &str) -> Result { + let str: String = hex.chars().filter(|c| !c.is_whitespace()).collect(); + let bin = hex::decode(str.as_str()).map_boc_deserialization_error()?; + Self::parse(&bin) + } + + pub fn parse_base64(base64: &str) -> Result { + let bin = STANDARD.decode(base64).map_boc_deserialization_error()?; + Self::parse(&bin) + } + + pub fn serialize(&self, has_crc32: bool) -> Result, TonCellError> { + let raw = convert_to_raw_boc(self)?; + raw.serialize(has_crc32) + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/builder.rs b/token-core/tcx-libs/tonlib-core/src/cell/builder.rs new file mode 100644 index 00000000..24d6c382 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/builder.rs @@ -0,0 +1,712 @@ +use std::collections::HashMap; +use std::ops::Add; +use std::sync::Arc; + +use bitstream_io::{BigEndian, BitWrite, BitWriter}; +use num_bigint::{BigInt, BigUint, Sign}; +use num_traits::{One, Zero}; + +use crate::cell::dict::{DictBuilder, ValWriter}; +use crate::cell::error::{MapTonCellError, TonCellError}; +use crate::cell::{ArcCell, Cell, CellParser}; +use 
crate::TonAddress; + +pub(crate) const MAX_CELL_BITS: usize = 1023; +pub(crate) const MAX_CELL_REFERENCES: usize = 4; +pub(crate) const MAX_LEVEL_MASK: u32 = 3; + +pub struct CellBuilder { + bit_writer: BitWriter, BigEndian>, + bits_to_write: usize, + references: Vec, + is_cell_exotic: bool, +} + +#[derive(Clone, Debug, PartialEq, Copy)] +pub enum EitherCellLayout { + Native, + ToRef, + ToCell, +} + +impl CellBuilder { + pub fn new() -> CellBuilder { + let bit_writer = BitWriter::endian(Vec::new(), BigEndian); + CellBuilder { + bit_writer, + bits_to_write: 0, + references: Vec::new(), + is_cell_exotic: false, + } + } + + pub fn set_cell_is_exotic(&mut self, val: bool) { + self.is_cell_exotic = val; + } + + pub fn store_bit(&mut self, val: bool) -> Result<&mut Self, TonCellError> { + self.bit_writer.write_bit(val).map_cell_builder_error()?; + self.bits_to_write += 1; + Ok(self) + } + + pub fn store_u8(&mut self, bit_len: usize, val: u8) -> Result<&mut Self, TonCellError> { + self.bit_writer + .write(bit_len as u32, val) + .map_cell_builder_error()?; + self.bits_to_write += bit_len; + Ok(self) + } + + pub fn store_i8(&mut self, bit_len: usize, val: i8) -> Result<&mut Self, TonCellError> { + self.bit_writer + .write(bit_len as u32, val) + .map_cell_builder_error()?; + self.bits_to_write += bit_len; + Ok(self) + } + + pub fn store_u32(&mut self, bit_len: usize, val: u32) -> Result<&mut Self, TonCellError> { + self.bit_writer + .write(bit_len as u32, val) + .map_cell_builder_error()?; + self.bits_to_write += bit_len; + Ok(self) + } + + pub fn store_i32(&mut self, bit_len: usize, val: i32) -> Result<&mut Self, TonCellError> { + self.bit_writer + .write(bit_len as u32, val) + .map_cell_builder_error()?; + self.bits_to_write += bit_len; + Ok(self) + } + + pub fn store_u64(&mut self, bit_len: usize, val: u64) -> Result<&mut Self, TonCellError> { + self.bit_writer + .write(bit_len as u32, val) + .map_cell_builder_error()?; + self.bits_to_write += bit_len; + Ok(self) + } + + 
pub fn store_i64(&mut self, bit_len: usize, val: i64) -> Result<&mut Self, TonCellError> { + self.bit_writer + .write(bit_len as u32, val) + .map_cell_builder_error()?; + self.bits_to_write += bit_len; + Ok(self) + } + + pub fn store_uint(&mut self, bit_len: usize, val: &BigUint) -> Result<&mut Self, TonCellError> { + let minimum_bits_needed = if val.is_zero() { 1 } else { val.bits() } as usize; + if minimum_bits_needed > bit_len { + return Err(TonCellError::cell_builder_error(format!( + "Value {} doesn't fit in {} bits (takes {} bits)", + val, bit_len, minimum_bits_needed + ))); + } + + let value_bytes = val.to_bytes_be(); + let first_byte_bit_size = bit_len - (value_bytes.len() - 1) * 8; + + for _ in 0..(first_byte_bit_size - 1) / 32 { + // fill full-bytes padding + self.store_u32(32, 0u32)?; + } + + // fill first byte with required size + if first_byte_bit_size % 32 == 0 { + self.store_u32(32, value_bytes[0] as u32)?; + } else { + self.store_u32(first_byte_bit_size % 32, value_bytes[0] as u32) + .map_cell_builder_error()?; + } + + // fill remaining bytes + for byte in value_bytes.iter().skip(1) { + self.store_u8(8, *byte).map_cell_builder_error()?; + } + Ok(self) + } + + pub fn store_int(&mut self, bit_len: usize, val: &BigInt) -> Result<&mut Self, TonCellError> { + let (sign, mag) = val.clone().into_parts(); + let bit_len = bit_len - 1; // reserve 1 bit for sign + if bit_len < mag.bits() as usize { + return Err(TonCellError::cell_builder_error(format!( + "Value {} doesn't fit in {} bits (takes {} bits)", + val, + bit_len, + mag.bits() + ))); + } + if sign == Sign::Minus { + self.store_byte(1)?; + self.store_uint(bit_len, &extend_and_invert_bits(bit_len, &mag)?)?; + } else { + self.store_byte(0)?; + self.store_uint(bit_len, &mag)?; + }; + Ok(self) + } + + pub fn store_byte(&mut self, val: u8) -> Result<&mut Self, TonCellError> { + self.store_u8(8, val) + } + + pub fn store_slice(&mut self, slice: &[u8]) -> Result<&mut Self, TonCellError> { + for val in slice { + 
self.store_byte(*val)?; + } + Ok(self) + } + + pub fn store_bits(&mut self, bit_len: usize, slice: &[u8]) -> Result<&mut Self, TonCellError> { + let full_bytes = bit_len / 8; + self.store_slice(&slice[0..full_bytes])?; + let last_byte_len = bit_len % 8; + if last_byte_len != 0 { + let last_byte = slice[full_bytes] >> (8 - last_byte_len); + self.store_u8(last_byte_len, last_byte)?; + } + Ok(self) + } + + pub fn store_string(&mut self, val: &str) -> Result<&mut Self, TonCellError> { + self.store_slice(val.as_bytes()) + } + + pub fn store_coins(&mut self, val: &BigUint) -> Result<&mut Self, TonCellError> { + if val.is_zero() { + self.store_u8(4, 0) + } else { + let num_bytes = (val.bits() as usize + 7) / 8; + self.store_u8(4, num_bytes as u8)?; + self.store_uint(num_bytes * 8, val) + } + } + + /// Stores address without optimizing hole address + pub fn store_raw_address(&mut self, val: &TonAddress) -> Result<&mut Self, TonCellError> { + self.store_u8(2, 0b10u8)?; + self.store_bit(false)?; + let wc = (val.workchain & 0xff) as u8; + self.store_u8(8, wc)?; + self.store_slice(&val.hash_part)?; + Ok(self) + } + + /// Stores address optimizing hole address two to bits + pub fn store_address(&mut self, val: &TonAddress) -> Result<&mut Self, TonCellError> { + if val == &TonAddress::NULL { + self.store_u8(2, 0)?; + } else { + self.store_raw_address(val)?; + } + Ok(self) + } + + /// Adds reference to an existing `Cell`. + /// + /// The reference is passed as `ArcCell` so it might be references from other cells. 
+ pub fn store_reference(&mut self, cell: &ArcCell) -> Result<&mut Self, TonCellError> { + let ref_count = self.references.len() + 1; + if ref_count > 4 { + return Err(TonCellError::cell_builder_error(format!( + "Cell must contain at most 4 references, got {}", + ref_count + ))); + } + self.references.push(cell.clone()); + Ok(self) + } + + pub fn store_references(&mut self, refs: &[ArcCell]) -> Result<&mut Self, TonCellError> { + for r in refs { + self.store_reference(r)?; + } + Ok(self) + } + + /// Adds a reference to a newly constructed `Cell`. + /// + /// The cell is wrapped it the `Arc`. + pub fn store_child(&mut self, cell: Cell) -> Result<&mut Self, TonCellError> { + self.store_reference(&Arc::new(cell)) + } + + pub fn store_remaining_bits( + &mut self, + parser: &mut CellParser, + ) -> Result<&mut Self, TonCellError> { + let num_full_bytes = parser.remaining_bits() / 8; + let bytes = parser.load_bytes(num_full_bytes)?; + self.store_slice(bytes.as_slice())?; + let num_bits = parser.remaining_bits() % 8; + let tail = parser.load_u8(num_bits)?; + self.store_u8(num_bits, tail)?; + Ok(self) + } + + pub fn store_cell_data(&mut self, cell: &Cell) -> Result<&mut Self, TonCellError> { + let mut parser = cell.parser(); + self.store_remaining_bits(&mut parser)?; + Ok(self) + } + + pub fn store_cell(&mut self, cell: &Cell) -> Result<&mut Self, TonCellError> { + self.store_cell_data(cell)?; + self.store_references(cell.references.as_slice())?; + Ok(self) + } + + // https://docs.ton.org/develop/data-formats/tl-b-types#either + pub fn store_either_cell_or_cell_ref( + &mut self, + cell: &ArcCell, + layout: EitherCellLayout, + ) -> Result<&mut Self, TonCellError> { + match layout { + EitherCellLayout::Native => { + if cell.bit_len() < self.remaining_bits() { + self.store_bit(false)?; + self.store_cell(cell)?; + } else { + self.store_bit(true)?; + self.store_reference(cell)?; + } + } + EitherCellLayout::ToRef => { + self.store_bit(true)?; + self.store_reference(cell)?; + } + 
EitherCellLayout::ToCell => { + self.store_bit(false)?; + self.store_cell(cell)?; + } + } + + Ok(self) + } + + // https://docs.ton.org/develop/data-formats/tl-b-types#maybe + pub fn store_maybe_cell_ref( + &mut self, + maybe_cell: &Option, + ) -> Result<&mut Self, TonCellError> { + if let Some(cell) = maybe_cell { + self.store_bit(true)?; + self.store_reference(cell)?; + } else { + self.store_bit(false)?; + } + + Ok(self) + } + + pub fn store_dict_data( + &mut self, + key_len_bits: usize, + value_writer: ValWriter, + data: HashMap, + ) -> Result<&mut Self, TonCellError> + where + BigUint: From, + { + let dict_builder = DictBuilder::new(key_len_bits, value_writer, data)?; + let dict_cell = dict_builder.build()?; + self.store_cell(&dict_cell) + } + + pub fn store_dict( + &mut self, + key_len_bits: usize, + value_writer: ValWriter, + data: HashMap, + ) -> Result<&mut Self, TonCellError> + where + BigUint: From, + { + if data.is_empty() { + self.store_bit(false) + } else { + self.store_bit(true)?; + + let dict_data = Arc::new( + CellBuilder::new() + .store_dict_data(key_len_bits, value_writer, data)? 
+ .build()?, + ); + self.store_reference(&dict_data) + } + } + + pub fn remaining_bits(&self) -> usize { + MAX_CELL_BITS - self.bits_to_write + } + + pub fn build(&mut self) -> Result { + let mut trailing_zeros = 0; + while !self.bit_writer.byte_aligned() { + self.bit_writer.write_bit(false).map_cell_builder_error()?; + trailing_zeros += 1; + } + + if let Some(vec) = self.bit_writer.writer() { + let bit_len = vec.len() * 8 - trailing_zeros; + if bit_len > MAX_CELL_BITS { + return Err(TonCellError::cell_builder_error(format!( + "Cell must contain at most {} bits, got {}", + MAX_CELL_BITS, bit_len + ))); + } + let ref_count = self.references.len(); + if ref_count > MAX_CELL_REFERENCES { + return Err(TonCellError::cell_builder_error(format!( + "Cell must contain at most 4 references, got {}", + ref_count + ))); + } + + Cell::new( + vec.clone(), + bit_len, + self.references.clone(), + self.is_cell_exotic, + ) + } else { + Err(TonCellError::CellBuilderError( + "Stream is not byte-aligned".to_string(), + )) + } + } +} + +fn extend_and_invert_bits(bits_cnt: usize, src: &BigUint) -> Result { + if bits_cnt < src.bits() as usize { + return Err(TonCellError::cell_builder_error(format!( + "Can't invert bits: value {} doesn't fit in {} bits", + src, bits_cnt + ))); + } + + let src_bytes = src.to_bytes_be(); + let inverted_bytes_cnt = (bits_cnt + 7) / 8; + let mut inverted = vec![0xffu8; inverted_bytes_cnt]; + // can be optimized + for (pos, byte) in src_bytes.iter().rev().enumerate() { + let inverted_pos = inverted.len() - 1 - pos; + inverted[inverted_pos] ^= byte; + } + let mut inverted_val_bytes = BigUint::from_bytes_be(&inverted) + .add(BigUint::one()) + .to_bytes_be(); + let leading_zeros = inverted_bytes_cnt * 8 - bits_cnt; + inverted_val_bytes[0] &= 0xffu8 >> leading_zeros; + Ok(BigUint::from_bytes_be(&inverted_val_bytes)) +} + +impl Default for CellBuilder { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + use 
std::str::FromStr; + + use num_bigint::{BigInt, BigUint, Sign}; + use num_traits::Zero; + + use crate::cell::builder::extend_and_invert_bits; + use crate::cell::dict::predefined_readers::{key_reader_u8, val_reader_uint}; + use crate::cell::{CellBuilder, TonCellError}; + use crate::types::TonAddress; + + #[test] + fn test_extend_and_invert_bits() -> Result<(), TonCellError> { + let a = BigUint::from(1u8); + let b = extend_and_invert_bits(8, &a)?; + println!("a: {:0x}", a); + println!("b: {:0x}", b); + assert_eq!(b, BigUint::from(0xffu8)); + + let b = extend_and_invert_bits(16, &a)?; + assert_eq!(b, BigUint::from_slice(&[0xffffu32])); + + let b = extend_and_invert_bits(20, &a)?; + assert_eq!(b, BigUint::from_slice(&[0xfffffu32])); + + let b = extend_and_invert_bits(8, &a)?; + assert_eq!(b, BigUint::from_slice(&[0xffu32])); + + let b = extend_and_invert_bits(9, &a)?; + assert_eq!(b, BigUint::from_slice(&[0x1ffu32])); + + assert!(extend_and_invert_bits(3, &BigUint::from(10u32)).is_err()); + Ok(()) + } + + #[test] + fn write_bit() -> Result<(), TonCellError> { + let mut writer = CellBuilder::new(); + let cell = writer.store_bit(true)?.build()?; + assert_eq!(cell.data, [0b1000_0000]); + assert_eq!(cell.bit_len, 1); + let mut reader = cell.parser(); + let result = reader.load_bit()?; + assert!(result); + Ok(()) + } + + #[test] + fn write_u8() -> Result<(), TonCellError> { + let value = 234u8; + let mut writer = CellBuilder::new(); + let cell = writer.store_u8(8, value)?.build()?; + assert_eq!(cell.data, [0b1110_1010]); + assert_eq!(cell.bit_len, 8); + let mut reader = cell.parser(); + let result = reader.load_u8(8)?; + assert_eq!(result, value); + Ok(()) + } + + #[test] + fn write_u32() -> Result<(), TonCellError> { + let value = 0xFAD45AADu32; + let mut writer = CellBuilder::new(); + let cell = writer.store_u32(32, value)?.build()?; + assert_eq!(cell.data, [0xFA, 0xD4, 0x5A, 0xAD]); + assert_eq!(cell.bit_len, 32); + let mut reader = cell.parser(); + let result = 
reader.load_u32(32)?; + assert_eq!(result, value); + Ok(()) + } + + #[test] + fn write_u64() -> Result<(), TonCellError> { + let value = 0xFAD45AADAA12FF45; + let mut writer = CellBuilder::new(); + let cell = writer.store_u64(64, value)?.build()?; + assert_eq!(cell.data, [0xFA, 0xD4, 0x5A, 0xAD, 0xAA, 0x12, 0xFF, 0x45]); + assert_eq!(cell.bit_len, 64); + let mut reader = cell.parser(); + let result = reader.load_u64(64)?; + assert_eq!(result, value); + Ok(()) + } + + #[test] + fn write_slice() -> Result<(), TonCellError> { + let value = [0xFA, 0xD4, 0x5A, 0xAD, 0xAA, 0x12, 0xFF, 0x45]; + let mut writer = CellBuilder::new(); + let cell = writer.store_slice(&value)?.build()?; + assert_eq!(cell.data, value); + assert_eq!(cell.bit_len, 64); + let mut reader = cell.parser(); + let bytes = reader.load_bytes(8)?; + assert_eq!(bytes, value); + Ok(()) + } + + #[test] + fn write_str() -> Result<(), TonCellError> { + let texts = ["hello", "Русский текст", "中华人民共和国", "\u{263A}😃"]; + for text in texts { + let mut writer = CellBuilder::new(); + let cell = writer.store_string(text)?.build()?; + let text_bytes = text.as_bytes(); + assert_eq!(cell.data, text_bytes); + assert_eq!(cell.bit_len, text_bytes.len() * 8); + let mut reader = cell.parser(); + let remaining_bytes = reader.remaining_bytes(); + let result = reader.load_utf8(remaining_bytes)?; + assert_eq!(result, text); + } + Ok(()) + } + + #[test] + fn write_address() -> Result<(), TonCellError> { + let addr = TonAddress::from_base64_url("EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjrR") + .unwrap(); + + let mut writer = CellBuilder::new(); + let cell = writer.store_address(&addr)?.build()?; + assert_eq!( + cell.data, + [ + 128, 28, 155, 42, 157, 243, 233, 194, 74, 20, 77, 107, 119, 90, 237, 67, 155, 162, + 249, 250, 17, 117, 117, 173, 233, 132, 124, 110, 68, 225, 93, 237, 238, 192 + ] + ); + assert_eq!(cell.bit_len, 2 + 1 + 8 + 32 * 8); + let mut reader = cell.parser(); + let result = reader.load_address()?; + 
assert_eq!(result, addr); + Ok(()) + } + + #[test] + fn write_big_int() -> Result<(), TonCellError> { + let value = BigInt::from_str("3").unwrap(); + let mut writer = CellBuilder::new(); + writer.store_int(33, &value)?; + let cell = writer.build()?; + println!("cell: {:?}", cell); + let written = BigInt::from_bytes_be(Sign::Plus, &cell.data); + assert_eq!(written, value); + + // 256 bits (+ sign) + let value = BigInt::from_str( + "97887266651548624282413032824435501549503168134499591480902563623927645013201", + ) + .unwrap(); + let mut writer = CellBuilder::new(); + writer.store_int(257, &value)?; + let cell = writer.build()?; + println!("cell: {:?}", cell); + let written = BigInt::from_bytes_be(Sign::Plus, &cell.data); + assert_eq!(written, value); + + let value = BigInt::from_str("-5").unwrap(); + let mut writer = CellBuilder::new(); + writer.store_int(5, &value)?; + let cell = writer.build()?; + println!("cell: {:?}", cell); + let written = BigInt::from_bytes_be(Sign::Plus, &cell.data[1..]); + let expected = BigInt::from_bytes_be(Sign::Plus, &[0xB0u8]); + assert_eq!(written, expected); + Ok(()) + } + + #[test] + fn write_load_big_uint() -> Result<(), TonCellError> { + let value = BigUint::from_str("3").unwrap(); + let mut writer = CellBuilder::new(); + assert!(writer.store_uint(1, &value).is_err()); + let bits_for_tests = [256, 128, 64, 8]; + + for bits_num in bits_for_tests.iter() { + writer.store_uint(*bits_num, &value)?; + } + let cell = writer.build()?; + println!("cell: {:?}", cell); + let mut cell_parser = cell.parser(); + for bits_num in bits_for_tests.iter() { + let written_value = cell_parser.load_uint(*bits_num)?; + assert_eq!(written_value, value); + } + + // 256 bit + let value = BigUint::from_str( + "97887266651548624282413032824435501549503168134499591480902563623927645013201", + ) + .unwrap(); + let mut writer = CellBuilder::new(); + assert!(writer.store_uint(255, &value).is_err()); + let bits_for_tests = [496, 264, 256]; + for bits_num in 
bits_for_tests.iter() { + writer.store_uint(*bits_num, &value)?; + } + let cell = writer.build()?; + let mut cell_parser = cell.parser(); + println!("cell: {:?}", cell); + for bits_num in bits_for_tests.iter() { + let written_value = cell_parser.load_uint(*bits_num)?; + assert_eq!(written_value, value); + } + + Ok(()) + } + + #[test] + fn test_padding() -> Result<(), TonCellError> { + let mut writer = CellBuilder::new(); + + let n = BigUint::from(0x55a5f0f0u32); + + writer.store_uint(32, &BigUint::zero())?; + writer.store_uint(32, &n)?; + writer.store_uint(31, &BigUint::zero())?; + writer.store_uint(31, &n)?; + writer.store_uint(35, &BigUint::zero())?; + writer.store_uint(35, &n)?; + let cell = writer.build()?; + + println!("{:?}", cell); + assert_eq!(cell.data.len(), 25); + assert_eq!(cell.bit_len, 196); + + let mut parser = cell.parser(); + let result_zero = parser.load_uint(32)?; + let result_test_num = parser.load_uint(32)?; + + assert_eq!(result_zero, BigUint::zero()); + assert_eq!(result_test_num, n); + let result_zero = parser.load_uint(31)?; + let result_test_num = parser.load_uint(31)?; + + assert_eq!(result_zero, BigUint::zero()); + assert_eq!(result_test_num, n); + let result_zero = parser.load_uint(35)?; + let result_test_num = parser.load_uint(35)?; + + assert_eq!(result_zero, BigUint::zero()); + + assert_eq!(result_test_num, n); + parser.ensure_empty()?; + + Ok(()) + } + + #[test] + fn test_zero_alone() -> Result<(), TonCellError> { + let bitlens_to_test = [ + 1, 7, 8, 9, 30, 31, 32, 33, 127, 128, 129, 255, 256, 257, 300, + ]; + for bitlen in bitlens_to_test { + let mut writer = CellBuilder::new(); + writer.store_uint(bitlen, &BigUint::zero())?; + + let cell = writer.build()?; + + println!("{:?}", cell); + let taeget_bytelen = (bitlen + 7) / 8; + assert_eq!(cell.data.len(), taeget_bytelen); + + assert_eq!(cell.bit_len, bitlen); + + let mut parser = cell.parser(); + let result_zero = parser.load_uint(bitlen)?; + + assert_eq!(result_zero, 
BigUint::zero()); + parser.ensure_empty()?; + } + Ok(()) + } + + #[test] + fn test_store_dict() -> Result<(), TonCellError> { + let mut builder = CellBuilder::new(); + let mut data = HashMap::new(); + data.insert(1u8, BigUint::from(2u8)); + data.insert(3u8, BigUint::from(4u8)); + + let value_writer = |writer: &mut CellBuilder, value: BigUint| { + writer.store_uint(8, &value)?; + Ok(()) + }; + builder.store_dict(8, value_writer, data.clone())?; + let cell = builder.build()?; + let mut parser = cell.parser(); + let parsed = parser.load_dict(8, key_reader_u8, val_reader_uint)?; + assert_eq!(data, parsed); + Ok(()) + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/cell_type.rs b/token-core/tcx-libs/tonlib-core/src/cell/cell_type.rs new file mode 100644 index 00000000..27fefa58 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/cell_type.rs @@ -0,0 +1,381 @@ +use std::cmp::PartialEq; +use std::io; +use std::io::Cursor; + +use bitstream_io::{BigEndian, ByteRead, ByteReader}; + +use crate::cell::level_mask::LevelMask; +use crate::cell::{ArcCell, Cell, MapTonCellError, TonCellError, DEPTH_BYTES, MAX_LEVEL}; +use crate::types::{TON_HASH_BYTES, ZERO_HASH}; +use crate::TonHash; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] +pub(crate) enum CellType { + #[default] + Ordinary, + PrunedBranch, + Library, + MerkleProof, + MerkleUpdate, +} + +#[derive(Debug, Clone)] +struct Pruned { + hash: TonHash, + depth: u16, +} + +impl CellType { + pub(crate) fn determine_exotic_cell_type(data: &[u8]) -> Result { + let Some(type_byte) = data.first() else { + return Err(TonCellError::InvalidExoticCellData( + "Not enough data for an exotic cell".to_owned(), + )); + }; + + let cell_type = match type_byte { + 1 => CellType::PrunedBranch, + 2 => CellType::Library, + 3 => CellType::MerkleProof, + 4 => CellType::MerkleUpdate, + cell_type => { + return Err(TonCellError::InvalidExoticCellData(format!( + "Invalid first byte in exotic cell data: {}", + cell_type + 
))) + } + }; + Ok(cell_type) + } + + pub(crate) fn validate( + &self, + data: &[u8], + bit_len: usize, + references: impl AsRef<[ArcCell]>, + ) -> Result<(), TonCellError> { + match self { + CellType::Ordinary => Ok(()), + CellType::PrunedBranch => self.validate_exotic_pruned(data, bit_len, references), + CellType::Library => self.validate_library(bit_len), + CellType::MerkleProof => self.validate_merkle_proof(data, bit_len, references), + CellType::MerkleUpdate => self.validate_merkle_update(data, bit_len, references), + } + } + + pub(crate) fn level_mask( + &self, + cell_data: &[u8], + cell_data_bit_len: usize, + references: &[ArcCell], + ) -> Result { + let result = match self { + CellType::Ordinary => references + .iter() + .fold(LevelMask::new(0), |level_mask, reference| { + level_mask.apply_or(reference.level_mask) + }), + CellType::PrunedBranch => self.pruned_level_mask(cell_data, cell_data_bit_len)?, + CellType::Library => LevelMask::new(0), + CellType::MerkleProof => references[0].level_mask.shift_right(), + CellType::MerkleUpdate => references[0] + .level_mask + .apply_or(references[1].level_mask) + .shift_right(), + }; + + Ok(result) + } + + pub(crate) fn child_depth(&self, child: &Cell, level: u8) -> u16 { + if matches!(self, CellType::MerkleProof | CellType::MerkleUpdate) { + child.get_depth(level + 1) + } else { + child.get_depth(level) + } + } + + pub(crate) fn resolve_hashes_and_depths( + &self, + hashes: Vec, + depths: Vec, + data: &[u8], + bit_len: usize, + level_mask: LevelMask, + ) -> Result<([TonHash; 4], [u16; 4]), TonCellError> { + let mut resolved_hashes = [ZERO_HASH; 4]; + let mut resolved_depths = [0; 4]; + + for i in 0..4 { + let hash_index = level_mask.apply(i).hash_index(); + + let (hash, depth) = if self == &CellType::PrunedBranch { + let this_hash_index = level_mask.hash_index(); + if hash_index != this_hash_index { + let pruned = self + .pruned(data, bit_len, level_mask) + .map_cell_builder_error()?; + (pruned[hash_index].hash, 
pruned[hash_index].depth) + } else { + (hashes[0], depths[0]) + } + } else { + (hashes[hash_index], depths[hash_index]) + }; + + resolved_hashes[i as usize] = hash; + resolved_depths[i as usize] = depth; + } + + Ok((resolved_hashes, resolved_depths)) + } + + fn validate_exotic_pruned( + &self, + data: &[u8], + bit_len: usize, + references: impl AsRef<[ArcCell]>, + ) -> Result<(), TonCellError> { + if !references.as_ref().is_empty() { + return Err(TonCellError::InvalidExoticCellData(format!( + "Pruned Branch cell can't have refs, got {}", + references.as_ref().len() + ))); + } + + if bit_len < 16 { + return Err(TonCellError::InvalidExoticCellData( + "Not enough data for a Pruned Branch special cell".to_owned(), + )); + } + + if !self.is_config_proof(bit_len) { + let level_mask = self.pruned_level_mask(data, bit_len)?; + let level = level_mask.level(); + + if level == 0 || level > MAX_LEVEL { + return Err(TonCellError::InvalidExoticCellData(format!( + "Pruned Branch cell level must be >= 1 and <= 3, got {}/{}", + level_mask.level(), + level_mask.mask() + ))); + } + + let expected_size: usize = + (2 + level_mask.apply(level - 1).hash_count() * (TON_HASH_BYTES + DEPTH_BYTES)) * 8; + + if bit_len != expected_size { + return Err(TonCellError::InvalidExoticCellData(format!( + "Pruned branch cell must have exactly {expected_size} bits, got {bit_len}" + ))); + } + } + + Ok(()) + } + + fn validate_library(&self, bit_len: usize) -> Result<(), TonCellError> { + const SIZE: usize = (1 + TON_HASH_BYTES) * 8; + + if bit_len != SIZE { + return Err(TonCellError::InvalidExoticCellData(format!( + "Library cell must have exactly {SIZE} bits, got {bit_len}" + ))); + } + + Ok(()) + } + + fn validate_merkle_proof( + &self, + data: &[u8], + bit_len: usize, + references: impl AsRef<[ArcCell]>, + ) -> Result<(), TonCellError> { + let references = references.as_ref(); + // type + hash + depth + const SIZE: usize = (1 + TON_HASH_BYTES + DEPTH_BYTES) * 8; + + if bit_len != SIZE { +
return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Proof cell must have exactly (8 + 256 + 16) bits, got {bit_len}" + ))); + } + + if references.as_ref().len() != 1 { + return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Proof cell must have exactly 1 ref, got {}", + references.as_ref().len() + ))); + } + + let proof_hash: [u8; TON_HASH_BYTES] = + data[1..(1 + TON_HASH_BYTES)].try_into().map_err(|err| { + TonCellError::InvalidExoticCellData(format!( + "Can't get proof hash bytes from cell data, {}", + err + )) + })?; + let proof_depth_bytes = data[(1 + TON_HASH_BYTES)..(1 + TON_HASH_BYTES + 2)] + .try_into() + .map_err(|err| { + TonCellError::InvalidExoticCellData(format!( + "Can't get proof depth bytes from cell data, {}", + err + )) + })?; + let proof_depth = u16::from_be_bytes(proof_depth_bytes); + let ref_hash = references[0].get_hash(0); + let ref_depth = references[0].get_depth(0); + + if proof_depth != ref_depth { + return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Proof cell ref depth must be exactly {proof_depth}, got {ref_depth}" + ))); + } + + if proof_hash != ref_hash { + return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Proof cell ref hash must be exactly {proof_hash:?}, got {ref_hash:?}" + ))); + } + + Ok(()) + } + + fn validate_merkle_update( + &self, + data: &[u8], + bit_len: usize, + references: impl AsRef<[ArcCell]>, + ) -> Result<(), TonCellError> { + let references = references.as_ref(); + // type + hash + hash + depth + depth + const SIZE: usize = 8 + (2 * (256 + 16)); + + if bit_len != SIZE { + return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Update cell must have exactly (8 + 2 * (256 + 16)) bits, got {bit_len}" + ))); + } + + if references.len() != 2 { + return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Update cell must have exactly 2 refs, got {}", + references.len() + ))); + } + + let proof_hash1: TonHash = data[1..33].try_into().map_err(|err| { +
TonCellError::InvalidExoticCellData(format!( + "Can't get proof hash bytes 1 from cell data, {}", + err + )) + })?; + let proof_hash2: TonHash = data[33..65].try_into().map_err(|err| { + TonCellError::InvalidExoticCellData(format!( + "Can't get proof hash bytes 2 from cell data, {}", + err + )) + })?; + let proof_depth_bytes1 = data[65..67].try_into().map_err(|err| { + TonCellError::InvalidExoticCellData(format!( + "Can't get proof depth bytes 1 from cell data, {}", + err + )) + })?; + let proof_depth_bytes2 = data[67..69].try_into().map_err(|err| { + TonCellError::InvalidExoticCellData(format!( + "Can't get proof depth bytes 2 from cell data, {}", + err + )) + })?; + let proof_depth1 = u16::from_be_bytes(proof_depth_bytes1); + let proof_depth2 = u16::from_be_bytes(proof_depth_bytes2); + + let ref_hash1 = references[0].get_hash(0); + let ref_depth1 = references[0].get_depth(0); + let ref_hash2 = references[1].get_hash(0); + let ref_depth2 = references[1].get_depth(0); + + if proof_depth1 != ref_depth1 { + return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Update cell ref depth 1 must be exactly {proof_depth1}, got {ref_depth1}" + ))); + } + + if proof_hash1 != ref_hash1 { + return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Update cell ref hash 1 must be exactly {proof_hash1:?}, got {ref_hash1:?}" + ))); + } + + if proof_depth2 != ref_depth2 { + return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Update cell ref depth 2 must be exactly {proof_depth2}, got {ref_depth2}" + ))); + } + + if proof_hash2 != ref_hash2 { + return Err(TonCellError::InvalidExoticCellData(format!( + "Merkle Update cell ref hash 2 must be exactly {proof_hash2:?}, got {ref_hash2:?}" + ))); + } + + Ok(()) + } + + fn pruned_level_mask(&self, data: &[u8], bit_len: usize) -> Result { + if data.len() < 5 { + return Err(TonCellError::InvalidExoticCellData(format!( + "Pruned Branch cell data can't be shorter than 5 bytes, got {}", + data.len() + ))); + } + + let
level_mask = if self.is_config_proof(bit_len) { + LevelMask::new(1) + } else { + let mask_byte = data[1]; + LevelMask::new(mask_byte as u32) + }; + + Ok(level_mask) + } + + fn pruned( + &self, + data: &[u8], + bit_len: usize, + level_mask: LevelMask, + ) -> Result, io::Error> { + let current_index = if self.is_config_proof(bit_len) { 1 } else { 2 }; + + let cursor = Cursor::new(&data[current_index..]); + let mut reader = ByteReader::endian(cursor, BigEndian); + + let level = level_mask.level() as usize; + let hashes = (0..level) + .map(|_| reader.read::()) + .collect::, _>>()?; + let depths = (0..level) + .map(|_| reader.read::()) + .collect::, _>>()?; + + let result = hashes + .into_iter() + .zip(depths) + .map(|(hash, depth)| Pruned { depth, hash }) + .collect(); + + Ok(result) + } + + /// Special case for config proof + /// This test proof is generated in the moment of voting for a slashing + /// it seems that tools generate it incorrectly and therefore doesn't have mask in it + /// so we need to hardcode it equal to 1 in this case + fn is_config_proof(&self, bit_len: usize) -> bool { + self == &CellType::PrunedBranch && bit_len == 280 + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/dict.rs b/token-core/tcx-libs/tonlib-core/src/cell/dict.rs new file mode 100644 index 00000000..c7d96def --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/dict.rs @@ -0,0 +1,13 @@ +mod builder; +mod leading_bit_utils; +mod parser; +pub mod predefined_readers; +pub mod predefined_writers; +mod types; + +pub(crate) use builder::DictBuilder; +pub(crate) use parser::DictParser; +pub use types::{KeyReader, SnakeFormatDict, ValReader, ValWriter}; + +#[cfg(test)] +mod tests; diff --git a/token-core/tcx-libs/tonlib-core/src/cell/dict/builder.rs b/token-core/tcx-libs/tonlib-core/src/cell/dict/builder.rs new file mode 100644 index 00000000..e9099ccf --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/dict/builder.rs @@ -0,0 +1,197 @@ +use 
std::collections::HashMap; +use std::sync::Arc; + +use num_bigint::BigUint; +use num_traits::{One, Zero}; + +use super::leading_bit_utils::{ + add_leading_bit, all_bits_same, common_prefix_len, remove_leading_bit, +}; +use super::types::LabelType; +use crate::cell::dict::ValWriter; +use crate::cell::TonCellError::InvalidInput; +use crate::cell::{Cell, CellBuilder, TonCellError}; + +pub(crate) struct DictBuilder { + value_writer: ValWriter, + data: HashMap, + keys_sorted: Vec, // keys contain 1 extra leading bit set to 1 + key_len_bits_left: usize, +} + +impl DictBuilder { + pub(crate) fn new( + key_len_bits: usize, + value_writer: ValWriter, + data: HashMap, + ) -> Result + where + BigUint: From, + { + let prepared_data = update_keys(key_len_bits, data)?; + let mut keys: Vec<_> = prepared_data.keys().cloned().collect(); + keys.sort(); + + let builder = DictBuilder { + value_writer, + data: prepared_data, + keys_sorted: keys, + key_len_bits_left: key_len_bits, + }; + Ok(builder) + } + + pub(crate) fn build(mut self) -> Result { + let mut builder = CellBuilder::new(); + if self.data.is_empty() { + return builder.build(); + } + let keys = self.keys_sorted.iter().cloned().enumerate().collect(); + self.fill_cell(&mut builder, keys)?; + builder.build() + } + + // keys: Vec<(original_key_position, remaining_key_part)> + fn fill_cell( + &mut self, + builder: &mut CellBuilder, + keys: Vec<(usize, BigUint)>, + ) -> Result<(), TonCellError> { + if keys.len() == 1 { + let (orig_key_pos, remaining_key) = &keys[0]; + return self.store_leaf(builder, *orig_key_pos, remaining_key); + } + + // will restore it at the end + let key_len_bits_left_original = self.key_len_bits_left; + + let key = &keys[0].1; + let key_len = key.bits() as usize; // includes leading bit + + let common_prefix_len = common_prefix_len(key, &keys.last().unwrap().1); + let label = { + let ignored_suffix_len = key_len - common_prefix_len - 1; + key >> ignored_suffix_len + }; + self.store_label(builder, &label)?; 
+ + let mut left_keys = Vec::with_capacity(keys.len() / 2); + let mut right_keys = Vec::with_capacity(keys.len() / 2); + + let new_key_len = key_len - common_prefix_len - 1; + let new_key_mask = (BigUint::one() << new_key_len) - 1u32; + for (pos, key) in keys { + let new_key = key & new_key_mask.clone(); + let is_right = new_key.bits() as usize == new_key_len; + let new_key_internal = add_leading_bit(&new_key, new_key_len - 1); + if is_right { + right_keys.push((pos, new_key_internal)); + } else { + left_keys.push((pos, new_key_internal)); + } + } + + self.key_len_bits_left -= common_prefix_len + 1; // branch consumes 1 more bit + let mut left_builder = CellBuilder::new(); + self.fill_cell(&mut left_builder, left_keys)?; + builder.store_reference(&Arc::new(left_builder.build()?))?; + + let mut right_builder = CellBuilder::new(); + self.fill_cell(&mut right_builder, right_keys)?; + builder.store_reference(&Arc::new(right_builder.build()?))?; + + self.key_len_bits_left = key_len_bits_left_original; + Ok(()) + } + + fn store_leaf( + &mut self, + builder: &mut CellBuilder, + orig_key_pos: usize, + label: &BigUint, + ) -> Result<(), TonCellError> { + self.store_label(builder, label)?; + let origin_key = &self.keys_sorted[orig_key_pos]; + let value = self.data.remove(origin_key).unwrap(); + (self.value_writer)(builder, value)?; + Ok(()) + } + + // expect label with leading one + fn store_label(&self, builder: &mut CellBuilder, label: &BigUint) -> Result<(), TonCellError> { + assert!(label.bits() > 0); + if label.is_one() { + // it's leading bit => label_type == short, len == 0 => store [false, false] + builder.store_u8(2, 0)?; + return Ok(()); + } + let all_bits_same = all_bits_same(label); + + let label_len = label.bits() as usize - 1; + let label_len_len = (self.key_len_bits_left as f32 + 1.0).log2().ceil() as usize; + let fair_label = remove_leading_bit(label); + let same_label_len = if all_bits_same { + 3 + label_len_len + } else { + usize::MAX + }; + let 
short_label_len = 2 + label_len * 2; + let long_label_len = 2 + label_len_len + label_len; + + let mut label_type = LabelType::Short; + if long_label_len < short_label_len { + label_type = LabelType::Long; + } + if same_label_len < short_label_len { + label_type = LabelType::Same; + } + match label_type { + LabelType::Same => { + builder.store_bit(true)?; + builder.store_bit(true)?; + builder.store_bit(!fair_label.is_zero())?; + builder.store_u32(label_len_len, label_len as u32)?; + } + LabelType::Short => { + builder.store_bit(false)?; + for _ in 0..label_len { + builder.store_bit(true)?; + } + builder.store_bit(false)?; + builder.store_uint(label_len, &fair_label)?; + } + LabelType::Long => { + builder.store_bit(true)?; + builder.store_bit(false)?; + builder.store_u32(label_len_len, label_len as u32)?; + builder.store_uint(label_len, &fair_label)?; + } + } + Ok(()) + } +} + +fn update_keys( + key_len_bits: usize, + data: HashMap, +) -> Result, TonCellError> +where + BigUint: From, +{ + let mut result = HashMap::new(); + + for (key, val) in data { + let key_big = BigUint::from(key); + let received_len_bits = key_big.bits(); + if received_len_bits as usize > key_len_bits { + let msg = format!( + "Invalid key length: Expected max_len={key_len_bits}, got len={received_len_bits}" + ); + return Err(InvalidInput(msg)); + } + // add leading bit to maintain proper bits length + let internal_key = add_leading_bit(&key_big, key_len_bits); + result.insert(internal_key, val); + } + Ok(result) +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/dict/leading_bit_utils.rs b/token-core/tcx-libs/tonlib-core/src/cell/dict/leading_bit_utils.rs new file mode 100644 index 00000000..d67c6c56 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/dict/leading_bit_utils.rs @@ -0,0 +1,84 @@ +use num_bigint::BigUint; +use num_traits::{One, Zero}; + +/// All functions except `add_leading_bit` expect 1 extra leading bit in `val` set to 1 + +pub(super) fn all_bits_same(val: 
&BigUint) -> bool { + if val.is_zero() { + return true; + } + let origin_bits = val.bits(); + let all_zero = (val - 1u32).bits() != origin_bits; + let all_ones = (val + 1u32).bits() != origin_bits; + all_zero || all_ones +} + +pub(super) fn common_prefix_len(a: &BigUint, b: &BigUint) -> usize { + let xor = a ^ b; + (a.bits() - xor.bits() - 1) as usize // don't forget leading zero +} + +pub(super) fn remove_leading_bit(val: &BigUint) -> BigUint { + let bits = val.bits(); + let mask = BigUint::one() << (bits - 1); + val ^ mask +} + +pub(super) fn add_leading_bit(val: &BigUint, val_bit_len: usize) -> BigUint { + let leading_bit = BigUint::one() << val_bit_len; + leading_bit | val +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_all_bits_same() { + for val in [ + BigUint::from(0u32), + BigUint::from(0b1111u32), + BigUint::from(0b1000u32), + ] { + assert!(all_bits_same(&val)); + } + + let val = BigUint::from(0b1011u32); + assert!(!all_bits_same(&val)); + } + + #[test] + fn test_common_prefix_len() { + let a = BigUint::from(0b1011u32); + let b = BigUint::from(0b1010u32); + assert_eq!(common_prefix_len(&a, &b), 2); + + let a = BigUint::from(0b1011u32); + let b = BigUint::from(0b1011u32); + assert_eq!(common_prefix_len(&a, &b), 3); + } + + #[test] + fn test_remove_leading_bit() { + let val = BigUint::from(0b1011u32); + assert_eq!(remove_leading_bit(&val), BigUint::from(0b011u32)); + + let val = BigUint::from(0b1111u32); + assert_eq!(remove_leading_bit(&val), BigUint::from(0b111u32)); + + let val = BigUint::from(0b1u32); + assert_eq!(remove_leading_bit(&val), BigUint::from(0u32)); + } + + #[test] + fn test_add_leading_bit() { + let val = BigUint::from(0b1011u32); + assert_eq!(add_leading_bit(&val, 4), BigUint::from(0b11011u32)); + + let val = BigUint::from(0b1111u32); + assert_eq!(add_leading_bit(&val, 4), BigUint::from(0b11111u32)); + + let val = BigUint::from(0u32); + assert_eq!(add_leading_bit(&val, 1), BigUint::from(0b10u32)); + } +} diff --git 
a/token-core/tcx-libs/tonlib-core/src/cell/dict/parser.rs b/token-core/tcx-libs/tonlib-core/src/cell/dict/parser.rs new file mode 100644 index 00000000..6927d6dc --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/dict/parser.rs @@ -0,0 +1,125 @@ +use std::collections::HashMap; +use std::hash::Hash; + +use num_bigint::BigUint; +use num_traits::{One, ToPrimitive}; + +use super::types::LabelType; +use crate::cell::dict::{KeyReader, ValReader}; +use crate::cell::TonCellError::InvalidInput; +use crate::cell::{CellParser, TonCellError}; + +pub(crate) struct DictParser { + key_len_bits: usize, + key_reader: KeyReader, + val_reader: ValReader, + cur_key_prefix: BigUint, // store leading 1 to determinate len properly +} + +impl DictParser { + pub(crate) fn new( + key_len_bits: usize, + key_reader: KeyReader, + val_reader: ValReader, + ) -> DictParser { + DictParser { + key_len_bits, + key_reader, + val_reader, + cur_key_prefix: BigUint::one(), + } + } + + pub(crate) fn parse(&mut self, parser: &mut CellParser) -> Result, TonCellError> { + // reset state in case of reusing + self.cur_key_prefix = BigUint::one(); + + let mut result = HashMap::new(); + self.parse_impl(parser, &mut result)?; + Ok(result) + } + + fn parse_impl( + &mut self, + parser: &mut CellParser, + dst: &mut HashMap, + ) -> Result<(), TonCellError> { + // will rollback prefix to original value at the end of the function + let origin_key_prefix_len = self.cur_key_prefix.bits(); + + let label_type = self.detect_label_type(parser)?; + match label_type { + LabelType::Same => { + let prefix_val = parser.load_bit()?; + let prefix_len_len = self.remain_suffix_bit_len(); + let prefix_len = parser.load_uint(prefix_len_len)?; + let prefix_len_usize = prefix_len.to_usize().ok_or_else(|| { + InvalidInput(format!("Failed to convert BigUint to usize: {prefix_len}")) + })?; + if prefix_val { + self.cur_key_prefix += 1u32; + self.cur_key_prefix <<= prefix_len_usize; + self.cur_key_prefix -= 1u32; + } else { + 
self.cur_key_prefix <<= prefix_len_usize; + } + } + LabelType::Short => { + let prefix_len = parser.load_unary_length()?; + if prefix_len != 0 { + let val = parser.load_uint(prefix_len)?; + self.cur_key_prefix <<= prefix_len; + self.cur_key_prefix |= val; + } + } + LabelType::Long => { + let prefix_len_len = self.remain_suffix_bit_len(); + let prefix_len = parser.load_uint(prefix_len_len)?; + let prefix_len_usize = prefix_len.to_usize().ok_or_else(|| { + InvalidInput(format!("Failed to convert BigUint to usize: {prefix_len}")) + })?; + if prefix_len_len != 0 { + let val = parser.load_uint(prefix_len_usize)?; + self.cur_key_prefix <<= prefix_len_usize; + self.cur_key_prefix |= val; + } + } + } + if self.cur_key_prefix.bits() as usize == (self.key_len_bits + 1) { + let mut key = BigUint::one() << self.key_len_bits; + key ^= &self.cur_key_prefix; + let user_key = (self.key_reader)(&key)?; + let user_value = (self.val_reader)(parser)?; + dst.insert(user_key, user_value); + } else { + let left_ref = parser.next_reference()?; + self.cur_key_prefix <<= 1; + self.parse_impl(&mut left_ref.parser(), dst)?; + + let right_ref = parser.next_reference()?; + self.cur_key_prefix += BigUint::one(); + self.parse_impl(&mut right_ref.parser(), dst)?; + } + self.cur_key_prefix >>= self.cur_key_prefix.bits() - origin_key_prefix_len; + Ok(()) + } + + fn detect_label_type(&self, parser: &mut CellParser) -> Result { + let label = if parser.load_bit()? { + if parser.load_bit()? 
{ + LabelType::Same + } else { + LabelType::Long + } + } else { + LabelType::Short + }; + Ok(label) + } + + fn remain_suffix_bit_len(&self) -> usize { + // add 2 because cur_prefix contains leading bit + let prefix_len_left = self.key_len_bits - self.cur_key_prefix.bits() as usize + 2; + (prefix_len_left as f32).log2().ceil() as usize + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/dict/predefined_readers.rs b/token-core/tcx-libs/tonlib-core/src/cell/dict/predefined_readers.rs new file mode 100644 index 00000000..8aba7b84 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/dict/predefined_readers.rs @@ -0,0 +1,99 @@ +use num_bigint::{BigInt, BigUint}; +use num_traits::ToPrimitive; + +use crate::cell::TonCellError::{InternalError, InvalidInput}; +use crate::cell::{ArcCell, Cell, CellParser, TonCellError}; +use crate::types::TON_HASH_BYTES; +use crate::TonHash; + +pub fn key_reader_u8(raw_key: &BigUint) -> Result { + validate_bit_len(raw_key, 8)?; + ok_or_err(raw_key.to_u8()) +} + +pub fn key_reader_u16(raw_key: &BigUint) -> Result { + validate_bit_len(raw_key, 16)?; + ok_or_err(raw_key.to_u16()) +} + +pub fn key_reader_u32(raw_key: &BigUint) -> Result { + validate_bit_len(raw_key, 32)?; + ok_or_err(raw_key.to_u32()) +} + +pub fn key_reader_u64(raw_key: &BigUint) -> Result { + validate_bit_len(raw_key, 64)?; + ok_or_err(raw_key.to_u64()) +} + +pub fn key_reader_256bit(val: &BigUint) -> Result { + validate_bit_len(val, TON_HASH_BYTES * 8)?; + let digits = val.to_bytes_be(); + let key_digits = if digits.len() < 32 { + let mut tmp = vec![0u8; 32 - digits.len()]; + tmp.extend(digits); + tmp + } else { + digits + }; + let slice: [u8; 32] = key_digits.try_into().map_err(|_| { + let msg = format!("Fail to get [u8; 32] from {}", val); + InternalError(msg) + })?; + Ok(slice) +} + +pub fn key_reader_uint(raw_key: &BigUint) -> Result { + Ok(raw_key.clone()) +} + +pub fn key_reader_decimal_string(raw_key: &BigUint) -> Result { + 
Ok(key_reader_uint(raw_key)?.to_str_radix(10)) +} + +pub fn val_reader_cell(parser: &mut CellParser) -> Result { + parser.load_remaining() +} + +pub fn val_reader_ref_cell(parser: &mut CellParser) -> Result { + parser.next_reference() +} + +pub fn val_reader_snake_formatted_string(parser: &mut CellParser) -> Result, TonCellError> { + let mut buffer = Vec::new(); + parser.next_reference()?.parse_snake_data(&mut buffer)?; + Ok(buffer) +} + +pub fn val_reader_uint(parser: &mut CellParser) -> Result { + let remaining = parser.remaining_bits(); + let result = parser.load_uint(remaining)?; + Ok(result) +} + +pub fn val_reader_int(parser: &mut CellParser) -> Result { + let remaining = parser.remaining_bits(); + let result = parser.load_int(remaining)?; + Ok(result) +} + +fn validate_bit_len(val: &BigUint, max_bits: usize) -> Result<(), TonCellError> { + if val.bits() > max_bits as u64 { + let msg = format!( + "Invalid value len: {}, expected {max_bits} bits", + val.bits() + ); + return Err(InvalidInput(msg)); + } + Ok(()) +} + +fn ok_or_err(val: Option) -> Result { + val.ok_or_else(|| { + let msg = format!( + "Fail to extract {} from BigUint", + std::any::type_name::() + ); + InternalError(msg) + }) +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/dict/predefined_writers.rs b/token-core/tcx-libs/tonlib-core/src/cell/dict/predefined_writers.rs new file mode 100644 index 00000000..c09b9f19 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/dict/predefined_writers.rs @@ -0,0 +1,35 @@ +use std::cmp::max; +use std::sync::Arc; + +use num_bigint::{BigInt, BigUint}; + +use crate::cell::{Cell, CellBuilder, TonCellError}; + +#[allow(dead_code)] +pub fn val_writer_ref_cell(builder: &mut CellBuilder, val: Arc) -> Result<(), TonCellError> { + builder.store_reference(&val)?; + Ok(()) +} + +pub fn val_writer_unsigned_min_size( + builder: &mut CellBuilder, + val: V, +) -> Result<(), TonCellError> +where + BigUint: From, +{ + let internal_val = BigUint::from(val); + let 
len_bits = max(1, internal_val.bits()) as usize; + builder.store_uint(len_bits, &internal_val)?; + Ok(()) +} + +pub fn val_writer_signed_min_size(builder: &mut CellBuilder, val: V) -> Result<(), TonCellError> +where + BigInt: From, +{ + let internal_val = BigInt::from(val); + let len_bits = max(1, internal_val.bits()) as usize; + builder.store_int(len_bits, &internal_val)?; + Ok(()) +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/dict/tests.rs b/token-core/tcx-libs/tonlib-core/src/cell/dict/tests.rs new file mode 100644 index 00000000..7290bc40 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/dict/tests.rs @@ -0,0 +1,232 @@ +// tests cover parser & builder together, so make sense to keep them in the same module +use std::collections::HashMap; +use std::ops::Deref; + +use num_bigint::BigUint; +use tokio_test::assert_ok; + +use crate::cell::dict::predefined_readers::{ + key_reader_256bit, key_reader_u16, key_reader_u32, key_reader_u64, key_reader_u8, + key_reader_uint, val_reader_ref_cell, val_reader_uint, +}; +use crate::cell::dict::predefined_writers::{val_writer_ref_cell, val_writer_unsigned_min_size}; +use crate::cell::{ArcCell, BagOfCells, Cell, CellBuilder}; + +#[test] +fn test_blockchain_data() -> anyhow::Result<()> { + let expected_data = HashMap::from([ + (0u8, BigUint::from(25965603044000000000u128)), + (1, BigUint::from(5173255344000000000u64)), + (2, BigUint::from(344883687000000000u64)), + ]); + let boc_b64 = "te6cckEBBgEAWgABGccNPKUADZm5MepOjMABAgHNAgMCASAEBQAnQAAAAAAAAAAAAAABMlF4tR2RgCAAJgAAAAAAAAAAAAABaFhaZZhr6AAAJgAAAAAAAAAAAAAAR8sYU4eC4AA1PIC5"; + let boc = BagOfCells::parse_base64(boc_b64)?; + let dict_cell = boc.single_root()?; + let mut parser = dict_cell.parser(); + let cell_data = parser.load_uint(96)?; + + let parsed_dict = assert_ok!(parser.load_dict(8, key_reader_u8, val_reader_uint)); + assert_eq!(expected_data, parsed_dict); + + let writer = |builder: &mut CellBuilder, val: BigUint| { + builder.store_uint(150, &val)?; 
// empirically found bit length + Ok(()) + }; + let mut builder = CellBuilder::new(); + builder.store_uint(96, &cell_data)?; + assert_ok!(builder.store_dict(8, writer, expected_data)); + let constructed_cell: Cell = builder.build()?; + assert_eq!(dict_cell.deref(), &constructed_cell); + Ok(()) +} + +#[test] +fn test_key_len_bigger_than_reader() -> anyhow::Result<()> { + let data = HashMap::from([ + (0u16, BigUint::from(4u32)), + (1, BigUint::from(5u32)), + (2, BigUint::from(6u32)), + (10u16, BigUint::from(7u32)), + (127, BigUint::from(8u32)), + ]); + + for key_len_bits in [8, 16, 32, 64, 111] { + let mut builder = CellBuilder::new(); + builder.store_dict(key_len_bits, val_writer_unsigned_min_size, data.clone())?; + let dict_cell = builder.build()?; + let parsed = dict_cell + .parser() + .load_dict(key_len_bits, key_reader_u16, val_reader_uint)?; + assert_eq!(data, parsed, "key_len_bits: {}", key_len_bits); + } + Ok(()) +} + +#[test] +fn test_reader_u8() -> anyhow::Result<()> { + let data = HashMap::from([ + (0u8, BigUint::from(4u32)), + (1, BigUint::from(5u32)), + (2, BigUint::from(6u32)), + (64, BigUint::from(7u32)), + ]); + let key_len_bits = 8; + let mut builder = CellBuilder::new(); + builder.store_dict(key_len_bits, val_writer_unsigned_min_size, data.clone())?; + let dict_cell = builder.build()?; + let parsed = dict_cell + .parser() + .load_dict(key_len_bits, key_reader_u8, val_reader_uint)?; + assert_eq!(data, parsed); + Ok(()) +} + +#[test] +fn test_reader_u16() -> anyhow::Result<()> { + let data = HashMap::from([ + (0u16, BigUint::from(4u32)), + (1, BigUint::from(5u32)), + (2, BigUint::from(6u32)), + (64, BigUint::from(7u32)), + ]); + let key_len_bits = 8; + let mut builder = CellBuilder::new(); + builder.store_dict(key_len_bits, val_writer_unsigned_min_size, data.clone())?; + let dict_cell = builder.build()?; + let parsed = dict_cell + .parser() + .load_dict(key_len_bits, key_reader_u16, val_reader_uint)?; + assert_eq!(data, parsed); + Ok(()) +} + +#[test] 
+fn test_reader_u32() -> anyhow::Result<()> { + let data = HashMap::from([ + (0u32, BigUint::from(4u32)), + (1, BigUint::from(5u32)), + (2, BigUint::from(6u32)), + (64, BigUint::from(7u32)), + ]); + let key_len_bits = 8; + let mut builder = CellBuilder::new(); + builder.store_dict(key_len_bits, val_writer_unsigned_min_size, data.clone())?; + let dict_cell = builder.build()?; + let parsed = dict_cell + .parser() + .load_dict(key_len_bits, key_reader_u32, val_reader_uint)?; + assert_eq!(data, parsed); + Ok(()) +} + +#[test] +fn test_reader_u64() -> anyhow::Result<()> { + let data = HashMap::from([ + (0u64, BigUint::from(4u32)), + (1, BigUint::from(5u32)), + (2, BigUint::from(6u32)), + (64, BigUint::from(7u32)), + ]); + let key_len_bits = 8; + let mut builder = CellBuilder::new(); + builder.store_dict(key_len_bits, val_writer_unsigned_min_size, data.clone())?; + let dict_cell = builder.build()?; + let parsed = dict_cell + .parser() + .load_dict(key_len_bits, key_reader_u64, val_reader_uint)?; + assert_eq!(data, parsed); + Ok(()) +} + +#[test] +fn test_reader_256bit() -> anyhow::Result<()> { + let bytes1 = [ + 1u8, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, + 4, 4, + ]; + let bytes2 = [ + 2u8, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, + 5, 5, + ]; + let bytes3 = [ + 3u8, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, + 6, 6, + ]; + let bytes4 = [ + 4u8, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, + 7, 7, + ]; + + let data_src = HashMap::from([ + (bytes1, BigUint::from(1u32)), + (bytes2, BigUint::from(2u32)), + (bytes3, BigUint::from(3u32)), + (bytes4, BigUint::from(4u32)), + ]); + + let data_serial = data_src + .iter() + .map(|(k, v)| (BigUint::from_bytes_be(k), v.clone())) + .collect::>(); + + let key_len_bits = 256; + let mut builder = CellBuilder::new(); + builder.store_dict(key_len_bits, 
val_writer_unsigned_min_size, data_serial)?;

    let dict_cell = builder.build()?;
    let parsed = dict_cell
        .parser()
        .load_dict(key_len_bits, key_reader_256bit, val_reader_uint)?;

    assert_eq!(data_src, parsed);
    Ok(())
}

// Keys read back as BigUint directly.
#[test]
fn test_reader_uint() -> anyhow::Result<()> {
    let data = HashMap::from([
        (BigUint::from(0u32), BigUint::from(4u32)),
        (BigUint::from(1u32), BigUint::from(5u32)),
        (BigUint::from(2u32), BigUint::from(6u32)),
        (BigUint::from(64u32), BigUint::from(7u32)),
    ]);
    let key_len_bits = 8;
    let mut builder = CellBuilder::new();
    builder.store_dict(key_len_bits, val_writer_unsigned_min_size, data.clone())?;
    let dict_cell = builder.build()?;
    let parsed = dict_cell
        .parser()
        .load_dict(key_len_bits, key_reader_uint, val_reader_uint)?;
    assert_eq!(data, parsed);
    Ok(())
}

// Values stored and read back as referenced cells.
#[test]
fn test_reader_cell() -> anyhow::Result<()> {
    let data = HashMap::from([
        (
            BigUint::from(0u32),
            ArcCell::new(Cell::new(vec![0], 20, vec![], false)?),
        ),
        (
            BigUint::from(1u32),
            ArcCell::new(Cell::new(vec![1], 20, vec![], false)?),
        ),
        (
            BigUint::from(2u32),
            ArcCell::new(Cell::new(vec![2], 20, vec![], false)?),
        ),
        (
            BigUint::from(6u32),
            ArcCell::new(Cell::new(vec![6], 20, vec![], false)?),
        ),
    ]);
    let key_len_bits = 8;
    let mut builder = CellBuilder::new();
    builder.store_dict(key_len_bits, val_writer_ref_cell, data.clone())?;
    let dict_cell = builder.build()?;
    let mut parser = dict_cell.parser();
    let parsed = parser.load_dict(key_len_bits, key_reader_uint, val_reader_ref_cell)?;
    assert_eq!(data, parsed);
    Ok(())
}
diff --git a/token-core/tcx-libs/tonlib-core/src/cell/dict/types.rs b/token-core/tcx-libs/tonlib-core/src/cell/dict/types.rs
new file mode 100644
index 00000000..a0e6d5a3
--- /dev/null
+++ b/token-core/tcx-libs/tonlib-core/src/cell/dict/types.rs
@@ -0,0 +1,18 @@
use std::collections::HashMap;

use num_bigint::BigUint;

use crate::cell::{CellBuilder, CellParser, TonCellError};
use crate::TonHash;

/// Label encoding kind used inside a dict (hashmap) cell.
#[derive(Debug)]
pub(crate) enum LabelType {
    Short, // high bit is 0
    Long,  // high bits are 10
    Same,  // high bits are 11
}

// NOTE(review): generic parameters below reconstructed from usage elsewhere in
// this patch (stripped during extraction) — verify against upstream tonlib-core.
pub type SnakeFormatDict = HashMap<TonHash, Vec<u8>>;
pub type KeyReader<K> = fn(&BigUint) -> Result<K, TonCellError>;
pub type ValReader<V> = fn(&mut CellParser) -> Result<V, TonCellError>;
pub type ValWriter<V> = fn(&mut CellBuilder, V) -> Result<(), TonCellError>;
diff --git a/token-core/tcx-libs/tonlib-core/src/cell/error.rs b/token-core/tcx-libs/tonlib-core/src/cell/error.rs
new file mode 100644
index 00000000..76a8a990
--- /dev/null
+++ b/token-core/tcx-libs/tonlib-core/src/cell/error.rs
@@ -0,0 +1,115 @@
use thiserror::Error;

/// Errors produced by cell building, parsing and BoC (de)serialization.
#[derive(Error, Debug)]
pub enum TonCellError {
    #[error("Bag of cells deserialization error ({0})")]
    BagOfCellsDeserializationError(String),

    #[error("Bag of cells serialization error ({0})")]
    BagOfCellsSerializationError(String),

    #[error("Cell builder error ({0})")]
    CellBuilderError(String),

    #[error("Cell parser error ({0})")]
    CellParserError(String),

    #[error("Internal error ({0})")]
    InternalError(String),

    #[error("Invalid index (Index: {idx}, reference count: {ref_count})")]
    InvalidIndex { idx: usize, ref_count: usize },

    #[error("Invalid address type (Type: {0})")]
    InvalidAddressType(u8),

    // NOTE(review): inner type parameter lost in extraction; `Option<CellType>`
    // matches upstream tonlib-core — confirm against the vendored version.
    #[error("Invalid cell type for exotic cell (Type: {0:?})")]
    InvalidExoticCellType(Option<CellType>),

    #[error("Invalid exotic cell data (({0})")]
    InvalidExoticCellData(String),

    #[error("Invalid cell data ({0})")]
    InvalidCellData(String),

    #[error("Invalid input error ({0})")]
    InvalidInput(String),

    #[error(
        "Non-empty reader (Remaining bits: {remaining_bits}, Remaining refs: {remaining_refs})"
    )]
    NonEmptyReader {
        remaining_bits: usize,
        remaining_refs: usize,
    },
}

/// Helpers to map arbitrary `std::error::Error`s into the matching
/// `TonCellError` variant.
pub trait MapTonCellError<T, E>
where
    E: std::error::Error,
{
    fn map_boc_deserialization_error(self) -> Result<T, TonCellError>;

    fn map_boc_serialization_error(self) -> Result<T, TonCellError>;

    fn map_cell_builder_error(self) -> Result<T, TonCellError>;

    fn
map_cell_parser_error(self) -> Result<T, TonCellError>;
}

impl<T, E> MapTonCellError<T, E> for Result<T, E>
where
    E: std::error::Error,
{
    fn map_boc_serialization_error(self) -> Result<T, TonCellError> {
        self.map_err(|e| TonCellError::boc_serialization_error(e))
    }

    fn map_boc_deserialization_error(self) -> Result<T, TonCellError> {
        self.map_err(|e| TonCellError::boc_deserialization_error(e))
    }

    fn map_cell_builder_error(self) -> Result<T, TonCellError> {
        self.map_err(|e| TonCellError::cell_builder_error(e))
    }

    fn map_cell_parser_error(self) -> Result<T, TonCellError> {
        self.map_err(|e| TonCellError::cell_parser_error(e))
    }
}

impl TonCellError {
    /// Wraps any printable error into `BagOfCellsSerializationError`.
    pub fn boc_serialization_error<T>(e: T) -> TonCellError
    where
        T: ToString,
    {
        TonCellError::BagOfCellsSerializationError(format!(
            "BoC serialization error: {}",
            e.to_string()
        ))
    }

    /// Wraps any printable error into `BagOfCellsDeserializationError`.
    pub fn boc_deserialization_error<T>(e: T) -> TonCellError
    where
        T: ToString,
    {
        TonCellError::BagOfCellsDeserializationError(format!(
            "BoC deserialization error: {}",
            e.to_string()
        ))
    }

    /// Wraps any printable error into `CellBuilderError`.
    pub fn cell_builder_error<T>(e: T) -> TonCellError
    where
        T: ToString,
    {
        TonCellError::CellBuilderError(format!("Cell builder error: {}", e.to_string()))
    }

    /// Wraps any printable error into `CellParserError`.
    pub fn cell_parser_error<T>(e: T) -> TonCellError
    where
        T: ToString,
    {
        TonCellError::CellParserError(format!("Cell parser error: {}", e.to_string()))
    }
}
diff --git a/token-core/tcx-libs/tonlib-core/src/cell/level_mask.rs b/token-core/tcx-libs/tonlib-core/src/cell/level_mask.rs
new file mode 100644
index 00000000..e4f7f4c3
--- /dev/null
+++ b/token-core/tcx-libs/tonlib-core/src/cell/level_mask.rs
@@ -0,0 +1,48 @@
/// Bit mask describing at which levels a cell's hash differs (TVM level mask).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct LevelMask {
    mask: u32,
}

impl LevelMask {
    pub fn new(new_mask: u32) -> Self {
        Self { mask: new_mask }
    }

    pub fn mask(&self) -> u32 {
        self.mask
    }

    /// Level = position of the highest set bit (0 for an empty mask).
    pub fn level(&self) -> u8 {
        32 - self.mask.leading_zeros() as u8
    }

    /// Number of set bits = index of the current hash.
    pub fn hash_index(&self) -> usize {
        self.mask.count_ones() as usize
    }

    pub fn hash_count(&self)
self.remaining_bits() / 8
    }

    /// Reads a single bit.
    pub fn load_bit(&mut self) -> Result<bool, TonCellError> {
        self.ensure_enough_bits(1)?;
        self.bit_reader.read_bit().map_cell_parser_error()
    }

    pub fn load_u8(&mut self, bit_len: usize) -> Result<u8, TonCellError> {
        self.load_number(bit_len)
    }

    pub fn load_i8(&mut self, bit_len: usize) -> Result<i8, TonCellError> {
        self.load_number(bit_len)
    }

    pub fn load_u16(&mut self, bit_len: usize) -> Result<u16, TonCellError> {
        self.load_number(bit_len)
    }

    pub fn load_i16(&mut self, bit_len: usize) -> Result<i16, TonCellError> {
        self.load_number(bit_len)
    }

    pub fn load_u32(&mut self, bit_len: usize) -> Result<u32, TonCellError> {
        self.load_number(bit_len)
    }

    pub fn load_i32(&mut self, bit_len: usize) -> Result<i32, TonCellError> {
        self.load_number(bit_len)
    }

    pub fn load_u64(&mut self, bit_len: usize) -> Result<u64, TonCellError> {
        self.load_number(bit_len)
    }

    pub fn load_i64(&mut self, bit_len: usize) -> Result<i64, TonCellError> {
        self.load_number(bit_len)
    }

    /// Reads `bit_len` bits as a big-endian unsigned big integer, assembled
    /// from 32-bit words (high word first, holding the leftover bits).
    pub fn load_uint(&mut self, bit_len: usize) -> Result<BigUint, TonCellError> {
        self.ensure_enough_bits(bit_len)?;
        let num_words = (bit_len + 31) / 32;
        let high_word_bits = if bit_len % 32 == 0 { 32 } else { bit_len % 32 };
        let mut words: Vec<u32> = vec![0_u32; num_words];
        let high_word = self.load_u32(high_word_bits)?;
        words[num_words - 1] = high_word;
        for i in (0..num_words - 1).rev() {
            let word = self.load_u32(32)?;
            words[i] = word;
        }
        let big_uint = BigUint::new(words);
        Ok(big_uint)
    }

    /// Reads `bit_len` bits as a signed big integer.
    /// NOTE(review): the sign is taken from bit 31 of the high word, which is
    /// the actual sign bit only when `bit_len` is a multiple of 32 — kept
    /// as-is to match upstream behavior; confirm before relying on it.
    pub fn load_int(&mut self, bit_len: usize) -> Result<BigInt, TonCellError> {
        self.ensure_enough_bits(bit_len)?;
        let num_words = (bit_len + 31) / 32;
        let high_word_bits = if bit_len % 32 == 0 { 32 } else { bit_len % 32 };
        let mut words: Vec<u32> = vec![0_u32; num_words];
        let high_word = self.load_u32(high_word_bits)?;
        let sign = if (high_word & (1 << 31)) == 0 {
            Sign::Plus
        } else {
            Sign::Minus
        };
        words[num_words - 1] = high_word;
        for i in (0..num_words - 1).rev() {
            let word = self.load_u32(32)?;
            words[i] = word;
        }
        let big_uint = BigInt::new(sign, words);
        Ok(big_uint)
    }

    pub fn load_byte(&mut self) -> Result<u8, TonCellError> {
        self.load_u8(8)
    }

    /// Fills `slice` with whole bytes read from the stream.
    pub fn load_slice(&mut self, slice: &mut [u8]) -> Result<(), TonCellError> {
        self.ensure_enough_bits(slice.len() * 8)?;
        self.bit_reader.read_bytes(slice).map_cell_parser_error()
    }

    pub fn load_bytes(&mut self, num_bytes: usize) -> Result<Vec<u8>, TonCellError> {
        let mut res = vec![0_u8; num_bytes];
        self.load_slice(res.as_mut_slice())?;
        Ok(res)
    }

    pub fn load_bits_to_slice(
        &mut self,
        num_bits: usize,
        slice: &mut [u8],
    ) -> Result<(), TonCellError> {
        self.ensure_enough_bits(num_bits)?;
        self.bit_reader.read_bits(num_bits, slice)?;
        Ok(())
    }

    /// Reads `num_bits` bits into a fresh, left-aligned byte vector.
    pub fn load_bits(&mut self, num_bits: usize) -> Result<Vec<u8>, TonCellError> {
        let total_bytes = (num_bits + 7) / 8;
        let mut res = vec![0_u8; total_bytes];
        self.load_bits_to_slice(num_bits, res.as_mut_slice())?;
        Ok(res)
    }

    pub fn load_utf8(&mut self, num_bytes: usize) -> Result<String, TonCellError> {
        let bytes = self.load_bytes(num_bytes)?;
        String::from_utf8(bytes).map_cell_parser_error()
    }

    /// Reads a VarUInteger coin amount: a 4-bit byte-length prefix followed by
    /// that many bytes of value.
    pub fn load_coins(&mut self) -> Result<BigUint, TonCellError> {
        let num_bytes = self.load_u8(4)?;
        if num_bytes == 0 {
            Ok(BigUint::zero())
        } else {
            self.load_uint(num_bytes as usize * 8)
        }
    }

    /// Packs all unread bits and references into a new `Cell`, consuming them.
    pub fn load_remaining(&mut self) -> Result<Cell, TonCellError> {
        let mut builder = CellBuilder::new();
        builder.store_remaining_bits(self)?;
        builder.store_references(&self.references[self.next_ref..])?;
        let cell = builder.build();
        self.next_ref = self.references.len();
        cell
    }

    /// Reads a MsgAddress: 2-bit tag, where 0 = addr_none and 2 = addr_std
    /// (anycast bit + 8-bit workchain + 256-bit hash part).
    pub fn load_address(&mut self) -> Result<TonAddress, TonCellError> {
        self.ensure_enough_bits(2)?;
        let tp = self.bit_reader.read::<u8>(2).map_cell_parser_error()?;
        match tp {
            0 => Ok(TonAddress::null()),
            2 => {
                self.ensure_enough_bits(1 + 8 + 32 * 8)?;
                let _res1 = self.bit_reader.read::<u8>(1).map_cell_parser_error()?;
                let wc = self.bit_reader.read::<u8>(8).map_cell_parser_error()?;
                let mut hash_part = [0_u8; 32];
                self.bit_reader
                    .read_bytes(&mut hash_part)
                    .map_cell_parser_error()?;
                let addr = TonAddress::new(wc as i32, &hash_part);
                Ok(addr)
            }
            _ => Err(TonCellError::InvalidAddressType(tp)),
        }
    }

    /// Counts consecutive 1-bits up to the first 0-bit.
    pub fn load_unary_length(&mut self) -> Result<usize, TonCellError> {
        let mut res = 0;
        while self.load_bit()? {
            res += 1;
        }
        Ok(res)
    }

    /// Parses dict entries from this parser's own data (no leading Maybe bit).
    pub fn load_dict_data<K: Eq + Hash, V>(
        &mut self,
        key_len: usize,
        key_reader: KeyReader<K>,
        val_reader: ValReader<V>,
    ) -> Result<HashMap<K, V>, TonCellError> {
        let mut dict_parser = DictParser::new(key_len, key_reader, val_reader);
        dict_parser.parse(self)
    }

    /// Reads a `HashmapE`: one Maybe bit, then (if set) the dict body in the
    /// next reference cell.
    pub fn load_dict<K: Eq + Hash, V>(
        &mut self,
        key_len: usize,
        key_reader: KeyReader<K>,
        val_reader: ValReader<V>,
    ) -> Result<HashMap<K, V>, TonCellError> {
        let has_data = self.load_bit()?;
        if !has_data {
            Ok(HashMap::new())
        } else {
            let reference_cell = self.next_reference()?;
            let mut reference_parser = reference_cell.parser();
            reference_parser.load_dict_data(key_len, key_reader, val_reader)
        }
    }
    ///Snake format when we store part of the data in a cell and the rest of the data in the first child cell (and so recursively).
    ///
    ///Must be prefixed with 0x00 byte.
///### TL-B scheme:
    ///
    /// ``` tail#_ {bn:#} b:(bits bn) = SnakeData ~0; ```
    ///
    /// ``` cons#_ {bn:#} {n:#} b:(bits bn) next:^(SnakeData ~n) = SnakeData ~(n + 1); ```
    pub fn load_dict_snake_format(&mut self) -> Result<SnakeFormatDict, TonCellError> {
        self.load_dict(256, key_reader_256bit, val_reader_snake_formatted_string)
    }

    pub fn load_dict_data_snake_format(&mut self) -> Result<SnakeFormatDict, TonCellError> {
        self.load_dict_data(256, key_reader_256bit, val_reader_snake_formatted_string)
    }

    /// Errors unless all data bits were consumed. Reference consumption is
    /// reported but deliberately not enforced (see inline todo).
    pub fn ensure_empty(&mut self) -> Result<(), TonCellError> {
        let remaining_bits = self.remaining_bits();
        let remaining_refs = self.references.len() - self.next_ref;
        // if remaining_bits == 0 && remaining_refs == 0 { // todo: We will restore reference checking in in 0.18
        if remaining_bits == 0 {
            Ok(())
        } else {
            Err(TonCellError::NonEmptyReader {
                remaining_bits,
                remaining_refs,
            })
        }
    }

    pub fn skip_bits(&mut self, num_bits: usize) -> Result<(), TonCellError> {
        self.ensure_enough_bits(num_bits)?;
        self.bit_reader
            .skip(num_bits as u32)
            .map_cell_parser_error()
    }

    /// Generic fixed-width read; `N` is any `bitstream_io::Numeric` type.
    fn load_number<N: Numeric>(&mut self, bit_len: usize) -> Result<N, TonCellError> {
        self.ensure_enough_bits(bit_len)?;

        self.bit_reader
            .read::<N>(bit_len as u32)
            .map_cell_parser_error()
    }

    fn ensure_enough_bits(&mut self, bit_len: usize) -> Result<(), TonCellError> {
        if self.remaining_bits() < bit_len {
            return Err(TonCellError::CellParserError(
                "Not enough bits to read".to_owned(),
            ));
        }
        Ok(())
    }

    /// Returns the next unread reference cell, advancing the cursor.
    pub fn next_reference(&mut self) -> Result<ArcCell, TonCellError> {
        if self.next_ref < self.references.len() {
            let reference = self.references[self.next_ref].clone();
            self.next_ref += 1;

            Ok(reference)
        } else {
            Err(TonCellError::CellParserError(
                "Not enough references to read".to_owned(),
            ))
        }
    }
    // https://docs.ton.org/develop/data-formats/tl-b-types#eiher
    pub fn load_either_cell_or_cell_ref(&mut self) -> Result<ArcCell, TonCellError> {
        // TODO: think about how we can make it generic
        let is_ref = self.load_bit()?;
        if is_ref {
            Ok(self.next_reference()?)
        } else {
            // Inline variant: wrap all remaining bits and refs into a new cell.
            let remaining_bits = self.remaining_bits();
            let data = self.load_bits(remaining_bits)?;
            let remaining_ref_count = self.references.len() - self.next_ref;
            let mut references = vec![];
            for _ in 0..remaining_ref_count {
                references.push(self.next_reference()?)
            }
            let result = Arc::new(Cell::new(data, remaining_bits, references, false)?);
            Ok(result)
        }
    }
    // https://docs.ton.org/develop/data-formats/tl-b-types#maybe
    pub fn load_maybe_cell_ref(&mut self) -> Result<Option<ArcCell>, TonCellError> {
        let is_some = self.load_bit()?;
        if is_some {
            Ok(Some(self.next_reference()?))
        } else {
            Ok(None)
        }
    }
}

#[cfg(test)]
mod tests {

    use std::sync::Arc;

    use num_bigint::{BigInt, BigUint};

    use crate::cell::{Cell, CellBuilder, EitherCellLayout};
    use crate::TonAddress;

    #[test]
    fn test_load_bit() {
        let cell = Cell::new([0b10101010].to_vec(), 4, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert!(parser.load_bit().unwrap());
        assert!(!parser.load_bit().unwrap());
        assert!(parser.load_bit().unwrap());
        assert!(!parser.load_bit().unwrap());
        assert!(parser.load_bit().is_err());
    }

    #[test]
    fn test_load_u8() {
        let cell = Cell::new([0b10101010].to_vec(), 4, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_u8(4).unwrap(), 0b1010);
        assert!(parser.load_u8(1).is_err());
    }

    #[test]
    fn test_load_i8() {
        let cell = Cell::new([0b10101010].to_vec(), 4, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_i8(4).unwrap(), 0b1010);
        assert!(parser.load_i8(2).is_err());

        let cell = Cell::new([0b10100110, 0b10101010].to_vec(), 13, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_i8(4).unwrap(), 0b1010);
        assert_eq!(parser.load_i8(8).unwrap(), 0b01101010);
        assert!(parser.load_i8(2).is_err());
    }

    #[test]
    fn test_load_u16() {
        let cell = Cell::new([0b10101010,
0b01010101].to_vec(), 12, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_u16(8).unwrap(), 0b10101010);
        assert!(parser.load_u16(8).is_err());
    }

    #[test]
    fn test_load_i16() {
        let cell = Cell::new([0b10101010, 0b01010101].to_vec(), 12, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_i16(9).unwrap(), 0b101010100);
        assert!(parser.load_i16(4).is_err());
    }

    #[test]
    fn test_load_u32() {
        let cell = Cell::new([0b10101010, 0b01010101].to_vec(), 13, vec![], false).unwrap();
        let mut parser = cell.parser();

        assert_eq!(parser.load_u32(8).unwrap(), 0b10101010);
        assert!(parser.load_u32(8).is_err());
    }

    #[test]
    fn test_load_i32() {
        let cell = Cell::new([0b10101010, 0b01010101].to_vec(), 14, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_i32(10).unwrap(), 0b1010101001);
        assert!(parser.load_i32(5).is_err());
    }

    #[test]
    fn test_load_u64() {
        let cell = Cell::new([0b10101010, 0b01010101].to_vec(), 13, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_u64(8).unwrap(), 0b10101010);
        assert!(parser.load_u64(8).is_err());
    }

    #[test]
    fn test_load_i64() {
        let cell = Cell::new([0b10101010, 0b01010101].to_vec(), 14, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_i64(10).unwrap(), 0b1010101001);
        assert!(parser.load_i64(5).is_err());
    }

    #[test]
    fn test_load_int() {
        let cell = Cell::new([0b10101010, 0b01010101].to_vec(), 14, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_int(10).unwrap(), BigInt::from(0b1010101001));
        assert!(parser.load_int(5).is_err());
    }

    #[test]
    fn test_load_uint() {
        let cell = Cell::new([0b10101010, 0b01010101].to_vec(), 14, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(
            parser.load_uint(10).unwrap(),
            BigUint::from(0b1010101001u64)
        );
        assert!(parser.load_uint(5).is_err());
    }

    #[test]
    fn test_load_byte() {
        let cell = Cell::new([0b10101010, 0b01010101].to_vec(), 15, vec![], false).unwrap();
        let mut parser = cell.parser();
        parser.load_bit().unwrap();
        assert_eq!(parser.load_byte().unwrap(), 0b01010100u8);
        assert!(parser.load_byte().is_err());
    }

    #[test]
    fn test_load_slice() {
        let cell = Cell::new(
            [0b10101010, 0b01010101, 0b10101010, 0b10101010, 0b10101010].to_vec(),
            32,
            vec![],
            false,
        )
        .unwrap();
        let mut parser = cell.parser();
        parser.load_bit().unwrap();
        let mut slice = [0; 2];
        parser.load_slice(&mut slice).unwrap();
        assert_eq!(slice, [0b01010100, 0b10101011]);
        assert!(parser.load_slice(&mut slice).is_err());
    }

    #[test]
    fn test_load_bytes() {
        let cell = Cell::new(
            [0b10101010, 0b01010101, 0b10101010, 0b10101010, 0b10101010].to_vec(),
            32,
            vec![],
            false,
        )
        .unwrap();
        let mut parser = cell.parser();
        parser.load_bit().unwrap();
        let slice = parser.load_bytes(2).unwrap();
        assert_eq!(slice, [0b01010100, 0b10101011]);
        assert!(parser.load_bytes(2).is_err());
    }

    #[test]
    fn test_load_bits_to_slice() {
        let cell = Cell::new(
            [0b10101010, 0b01010101, 0b10101010, 0b10101010, 0b10101010].to_vec(),
            22,
            vec![],
            false,
        )
        .unwrap();
        let mut parser = cell.parser();
        parser.load_bit().unwrap();
        let mut slice = [0; 2];
        parser.load_bits_to_slice(12, &mut slice).unwrap();
        assert_eq!(slice, [0b01010100, 0b10100000]);
        assert!(parser.load_bits_to_slice(10, &mut slice).is_err());
    }

    #[test]
    fn test_load_bits() {
        let cell = Cell::new(
            [0b10101010, 0b01010101, 0b10101010, 0b10101010, 0b10101010].to_vec(),
            25,
            vec![],
            false,
        )
        .unwrap();
        let mut parser = cell.parser();
        parser.load_bit().unwrap();
        let slice = parser.load_bits(5).unwrap();
        assert_eq!(slice, [0b01010000]);
        let slice = parser.load_bits(15).unwrap();
        assert_eq!(slice, [0b10010101, 0b01101010]);
        assert!(parser.load_bits(5).is_err());
    }

    #[test]
    fn test_load_utf8() {
        let cell = Cell::new("a1j\0".as_bytes().to_vec(), 31, vec![], false).unwrap();
        let mut parser = cell.parser();
        let string = parser.load_utf8(2).unwrap();
        assert_eq!(string, "a1");
        let string = parser.load_utf8(1).unwrap();
        assert_eq!(string, "j");
        assert!(parser.load_utf8(1).is_err());
    }

    #[test]
    fn test_load_coins() {
        let cell = Cell::new(
            [
                0b00011111, 0b11110011, 0b11110011, 0b11110011, 0b11110011, 0b00011111, 0b11110011,
            ]
            .to_vec(),
            48,
            vec![],
            false,
        )
        .unwrap();
        let mut parser = cell.parser();

        assert_eq!(parser.load_coins().unwrap(), BigUint::from(0b11111111u64));
        assert_eq!(
            parser.load_coins().unwrap(),
            BigUint::from(0b111100111111001111110011u64)
        );
        assert!(parser.load_coins().is_err());
    }

    #[test]
    fn test_load_address() {
        let cell = Cell::new([0].to_vec(), 3, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_address().unwrap(), TonAddress::null());
        assert!(parser.load_address().is_err());

        // with full addresses
        let cell = Cell::new(
            [
                0b10000000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0b00010000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0b00000010, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            ]
            .to_vec(),
            (3 + 8 + 32 * 8) * 3 - 1,
            vec![],
            false,
        )
        .unwrap();
        let mut parser = cell.parser();
        assert_eq!(parser.load_address().unwrap(), TonAddress::null());
        assert_eq!(parser.load_address().unwrap(), TonAddress::null());
        assert!(parser.load_address().is_err());
    }

    #[test]
    fn test_ensure_empty() {
        let cell = Cell::new([0b10101010].to_vec(), 7, vec![], false).unwrap();
        let mut parser = cell.parser();
        parser.load_u8(4).unwrap();
        assert!(parser.ensure_empty().is_err());
        parser.load_u8(3).unwrap();
        assert!(parser.ensure_empty().is_ok());
    }

    #[test]
    fn test_skip_bits_not_enough_bits() {
        let cell = Cell::new([0b11111001, 0b00001010].to_vec(), 12, vec![], false).unwrap();
        let mut parser = cell.parser();
        assert!(parser.skip_bits(5).is_ok());
        assert_eq!(parser.load_bits(5).unwrap(), [0b00100000]);
        assert!(parser.skip_bits(3).is_err());
    }

    #[test]
    fn test_parser_with_refs() {
        let ref1 = Cell::new([0b11111001, 0b00001010].to_vec(), 12, vec![], false).unwrap();
        let ref2 = Cell::new([0b11111001, 0b00001010].to_vec(), 12, vec![], false).unwrap();
        let cell = Cell::new(
            [0b11111001, 0b00001010].to_vec(),
            12,
            vec![ref1.into(), ref2.into()],
            false,
        )
        .unwrap();
        let mut parser = cell.parser();

        assert!(parser.next_reference().is_ok());
        assert!(parser.next_reference().is_ok());
        assert!(parser.next_reference().is_err());
    }

    #[test]
    fn test_either_with_references() {
        let reference_cell = Cell::new([0xA5, 0x5A].to_vec(), 12, vec![], false).unwrap();
        let cell_either = Arc::new(
            Cell::new(
                [0xFF, 0xB0].to_vec(),
                12,
                vec![reference_cell.into()],
                false,
            )
            .unwrap(),
        );
        let cell = CellBuilder::new()
            .store_bit(true)
            .unwrap()
            .store_either_cell_or_cell_ref(&cell_either, EitherCellLayout::Native)
            .unwrap()
            .build()
            .unwrap();

        let mut parser = cell.parser();

        let result_first_bit = parser.load_bit().unwrap();
        let result_cell_either = parser.load_either_cell_or_cell_ref().unwrap();

        assert!(result_first_bit);
        assert_eq!(result_cell_either, cell_either);
    }
}
diff --git a/token-core/tcx-libs/tonlib-core/src/cell/raw.rs b/token-core/tcx-libs/tonlib-core/src/cell/raw.rs
new file mode 100644
index 00000000..85092cc3
--- /dev/null
+++ b/token-core/tcx-libs/tonlib-core/src/cell/raw.rs
@@ -0,0 +1,341 @@
use std::io::Cursor;

use bitstream_io::{BigEndian, BitWrite, BitWriter, ByteRead, ByteReader};
use crc::Crc;
use lazy_static::lazy_static;

use
crate::cell::level_mask::LevelMask; +use crate::cell::{MapTonCellError, TonCellError}; + +lazy_static! { + pub static ref CRC_32_ISCSI: Crc = Crc::::new(&crc::CRC_32_ISCSI); +} + +/// Raw representation of Cell. +/// +/// References are stored as indices in BagOfCells. +#[derive(PartialEq, Eq, Debug, Clone, Hash)] +pub(crate) struct RawCell { + pub(crate) data: Vec, + pub(crate) bit_len: usize, + pub(crate) references: Vec, + pub(crate) is_exotic: bool, + level_mask: u32, +} + +impl RawCell { + pub(crate) fn new( + data: Vec, + bit_len: usize, + references: Vec, + level_mask: u32, + is_exotic: bool, + ) -> Self { + Self { + data, + bit_len, + references, + level_mask: level_mask & 7, + is_exotic, + } + } +} + +/// Raw representation of BagOfCells. +/// +/// `cells` must be topologically sorted. +#[derive(PartialEq, Eq, Debug, Clone, Hash)] +pub(crate) struct RawBagOfCells { + pub(crate) cells: Vec, + pub(crate) roots: Vec, +} + +const GENERIC_BOC_MAGIC: u32 = 0xb5ee9c72; +const _INDEXED_BOC_MAGIC: u32 = 0x68ff65f3; +const _INDEXED_CRC32_MAGIC: u32 = 0xacc3a728; + +impl RawBagOfCells { + pub(crate) fn parse(serial: &[u8]) -> Result { + let cursor = Cursor::new(serial); + + let mut reader: ByteReader, BigEndian> = + ByteReader::endian(cursor, BigEndian); + // serialized_boc#b5ee9c72 + let magic = reader.read::().map_boc_deserialization_error()?; + + let (has_idx, has_crc32c, _has_cache_bits, size) = match magic { + GENERIC_BOC_MAGIC => { + // has_idx:(## 1) has_crc32c:(## 1) has_cache_bits:(## 1) flags:(## 2) { flags = 0 } + let header = reader.read::().map_boc_deserialization_error()?; + let has_idx = (header >> 7) & 1 == 1; + let has_crc32c = (header >> 6) & 1 == 1; + let has_cache_bits = (header >> 5) & 1 == 1; + // size:(## 3) { size <= 4 } + let size = header & 0b0000_0111; + + (has_idx, has_crc32c, has_cache_bits, size) + } + magic => { + return Err(TonCellError::boc_deserialization_error(format!( + "Unsupported cell magic number: {:#}", + magic + ))); + } + }; 
+ // off_bytes:(## 8) { off_bytes <= 8 } + let off_bytes = reader.read::().map_boc_deserialization_error()?; + //cells:(##(size * 8)) + let cells = read_var_size(&mut reader, size)?; + // roots:(##(size * 8)) { roots >= 1 } + let roots = read_var_size(&mut reader, size)?; + // absent:(##(size * 8)) { roots + absent <= cells } + let _absent = read_var_size(&mut reader, size)?; + // tot_cells_size:(##(off_bytes * 8)) + let _tot_cells_size = read_var_size(&mut reader, off_bytes)?; + // root_list:(roots * ##(size * 8)) + let mut root_list = vec![]; + for _ in 0..roots { + root_list.push(read_var_size(&mut reader, size)?) + } + // index:has_idx?(cells * ##(off_bytes * 8)) + let mut index = vec![]; + if has_idx { + for _ in 0..cells { + index.push(read_var_size(&mut reader, off_bytes)?) + } + } + // cell_data:(tot_cells_size * [ uint8 ]) + let mut cell_vec = Vec::with_capacity(cells); + + for _ in 0..cells { + let cell = read_cell(&mut reader, size)?; + cell_vec.push(cell); + } + // crc32c:has_crc32c?uint32 + let _crc32c = if has_crc32c { + reader.read::().map_boc_deserialization_error()? 
+ } else { + 0 + }; + // TODO: Check crc32 + + Ok(RawBagOfCells { + cells: cell_vec, + roots: root_list, + }) + } + + pub(crate) fn serialize(&self, has_crc32: bool) -> Result, TonCellError> { + //Based on https://github.com/toncenter/tonweb/blob/c2d5d0fc23d2aec55a0412940ce6e580344a288c/src/boc/Cell.js#L198 + + let root_count = self.roots.len(); + let num_ref_bits = 32 - (self.cells.len() as u32).leading_zeros(); + let num_ref_bytes = (num_ref_bits + 7) / 8; + let has_idx = false; + + let mut full_size = 0u32; + + for cell in &self.cells { + full_size += raw_cell_size(cell, num_ref_bytes); + } + + let num_offset_bits = 32 - full_size.leading_zeros(); + let num_offset_bytes = (num_offset_bits + 7) / 8; + + let total_size = 4 + // magic + 1 + // flags and s_bytes + 1 + // offset_bytes + 3 * num_ref_bytes + // cells_num, roots, complete + num_offset_bytes + // full_size + num_ref_bytes + // root_idx + (if has_idx { self.cells.len() as u32 * num_offset_bytes } else { 0 }) + + full_size + + (if has_crc32 { 4 } else { 0 }); + + let mut writer = BitWriter::endian(Vec::with_capacity(total_size as usize), BigEndian); + + writer + .write(32, GENERIC_BOC_MAGIC) + .map_boc_serialization_error()?; + + //write flags byte + let has_cache_bits = false; + let flags: u8 = 0; + writer.write_bit(has_idx).map_boc_serialization_error()?; + writer.write_bit(has_crc32).map_boc_serialization_error()?; + writer + .write_bit(has_cache_bits) + .map_boc_serialization_error()?; + writer.write(2, flags).map_boc_serialization_error()?; + writer + .write(3, num_ref_bytes) + .map_boc_serialization_error()?; + writer + .write(8, num_offset_bytes) + .map_boc_serialization_error()?; + writer + .write(8 * num_ref_bytes, self.cells.len() as u32) + .map_boc_serialization_error()?; + writer + .write(8 * num_ref_bytes, root_count as u32) + .map_boc_serialization_error()?; + writer + .write(8 * num_ref_bytes, 0) + .map_boc_serialization_error()?; // Complete BOCs only + writer + .write(8 * num_offset_bytes, 
full_size) + .map_boc_serialization_error()?; + for &root in &self.roots { + writer + .write(8 * num_ref_bytes, root as u32) + .map_boc_serialization_error()?; + } + + for cell in &self.cells { + write_raw_cell(&mut writer, cell, num_ref_bytes)?; + } + + if has_crc32 { + let bytes = writer.writer().ok_or_else(|| { + TonCellError::boc_serialization_error("Stream is not byte-aligned") + })?; + let cs = CRC_32_ISCSI.checksum(bytes.as_slice()); + writer + .write_bytes(cs.to_le_bytes().as_slice()) + .map_boc_serialization_error()?; + } + writer.byte_align().map_boc_serialization_error()?; + let res = writer + .writer() + .ok_or_else(|| TonCellError::boc_serialization_error("Stream is not byte-aligned"))?; + Ok(res.clone()) + } +} + +fn read_cell( + reader: &mut ByteReader, BigEndian>, + size: u8, +) -> Result { + let d1 = reader.read::().map_boc_deserialization_error()?; + let d2 = reader.read::().map_boc_deserialization_error()?; + + let ref_num = d1 & 0b111; + let is_exotic = (d1 & 0b1000) != 0; + let has_hashes = (d1 & 0b10000) != 0; + let level_mask = (d1 >> 5) as u32; + let data_size = ((d2 >> 1) + (d2 & 1)).into(); + let full_bytes = (d2 & 0x01) == 0; + + if has_hashes { + let hash_count = LevelMask::new(level_mask).hash_count(); + let skip_size = hash_count * (32 + 2); + + // TODO: check depth and hashes + reader + .skip(skip_size as u32) + .map_boc_deserialization_error()?; + } + + let mut data = reader + .read_to_vec(data_size) + .map_boc_deserialization_error()?; + + let data_len = data.len(); + let padding_len = if data_len > 0 && !full_bytes { + // Fix last byte, + // see https://github.com/toncenter/tonweb/blob/c2d5d0fc23d2aec55a0412940ce6e580344a288c/src/boc/BitString.js#L302 + let num_zeros = data[data_len - 1].trailing_zeros(); + if num_zeros >= 8 { + return Err(TonCellError::boc_deserialization_error( + "Last byte of binary must not be zero if full_byte flag is not set", + )); + } + data[data_len - 1] &= !(1 << num_zeros); + num_zeros + 1 + } else { + 0 
+ }; + let bit_len = data.len() * 8 - padding_len as usize; + let mut references: Vec = Vec::new(); + for _ in 0..ref_num { + references.push(read_var_size(reader, size)?); + } + let cell = RawCell::new(data, bit_len, references, level_mask, is_exotic); + Ok(cell) +} + +fn raw_cell_size(cell: &RawCell, ref_size_bytes: u32) -> u32 { + let data_len = (cell.bit_len + 7) / 8; + 2 + data_len as u32 + cell.references.len() as u32 * ref_size_bytes +} + +fn write_raw_cell( + writer: &mut BitWriter, BigEndian>, + cell: &RawCell, + ref_size_bytes: u32, +) -> Result<(), TonCellError> { + let level = cell.level_mask; + let is_exotic = cell.is_exotic as u32; + let num_refs = cell.references.len() as u32; + let d1 = num_refs + is_exotic * 8 + level * 32; + + let padding_bits = cell.bit_len % 8; + let full_bytes = padding_bits == 0; + let data = cell.data.as_slice(); + let data_len_bytes = (cell.bit_len + 7) / 8; + // data_len_bytes <= 128 by spec, but d2 must be u8 by spec as well + let d2 = (data_len_bytes * 2 - if full_bytes { 0 } else { 1 }) as u8; //subtract 1 if the last byte is not full + + writer.write(8, d1).map_boc_serialization_error()?; + writer.write(8, d2).map_boc_serialization_error()?; + if !full_bytes { + writer + .write_bytes(&data[..data_len_bytes - 1]) + .map_boc_serialization_error()?; + let last_byte = data[data_len_bytes - 1]; + let l = last_byte | 1 << (8 - padding_bits - 1); + writer.write(8, l).map_boc_serialization_error()?; + } else { + writer.write_bytes(data).map_boc_serialization_error()?; + } + + for r in cell.references.as_slice() { + writer + .write(8 * ref_size_bytes, *r as u32) + .map_boc_serialization_error()?; + } + + Ok(()) +} + +fn read_var_size( + reader: &mut ByteReader, BigEndian>, + n: u8, +) -> Result { + let bytes = reader + .read_to_vec(n.into()) + .map_boc_deserialization_error()?; + + let mut result = 0; + for &byte in &bytes { + result <<= 8; + result |= usize::from(byte); + } + Ok(result) +} + +#[cfg(test)] +mod tests { + + use 
super::*; + + #[test] + fn test_raw_cell_serialize() { + let raw_cell = RawCell::new(vec![1; 128], 1023, vec![], 255, false); + let raw_bag = RawBagOfCells { + cells: vec![raw_cell], + roots: vec![0], + }; + assert!(raw_bag.serialize(false).is_ok()); + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/raw_boc_from_boc.rs b/token-core/tcx-libs/tonlib-core/src/cell/raw_boc_from_boc.rs new file mode 100644 index 00000000..55bb5be9 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/raw_boc_from_boc.rs @@ -0,0 +1,153 @@ +use std::cell::RefCell; +use std::collections::HashMap; +use std::sync::Arc; + +use crate::cell::{ArcCell, BagOfCells, Cell, RawBagOfCells, RawCell, TonCellError}; +use crate::TonHash; + +#[derive(Debug, Clone)] +struct IndexedCell { + index: usize, + cell: ArcCell, +} + +pub(crate) fn convert_to_raw_boc(boc: &BagOfCells) -> Result { + let cells_by_hash = build_and_verify_index(&boc.roots); + + // Sort indexed cells by their index value. + let mut index_slice: Vec<_> = cells_by_hash.values().collect(); + index_slice.sort_unstable_by(|a, b| a.borrow().index.cmp(&b.borrow().index)); + + // Remove gaps in indices. + index_slice + .iter() + .enumerate() + .for_each(|(real_index, indexed_cell)| indexed_cell.borrow_mut().index = real_index); + + let cells_iter = index_slice + .into_iter() + .map(|indexed_cell| indexed_cell.borrow().cell.clone()); + let raw_cells = raw_cells_from_cells(cells_iter, &cells_by_hash)?; + let root_indices = root_indices(&boc.roots, &cells_by_hash)?; + + Ok(RawBagOfCells { + cells: raw_cells, + roots: root_indices, + }) +} + +fn build_and_verify_index(roots: &[ArcCell]) -> HashMap> { + let mut current_cells: Vec<_> = roots.iter().map(Arc::clone).collect(); + let mut new_hash_index = 0; + let mut cells_by_hash = HashMap::new(); + + // Process cells to build the initial index. 
+ while !current_cells.is_empty() { + let mut next_cells = Vec::with_capacity(current_cells.len() * 4); + for cell in current_cells.iter() { + let hash = cell.cell_hash(); + + if cells_by_hash.contains_key(&hash) { + continue; // Skip if already indexed. + } + + cells_by_hash.insert( + hash, + RefCell::new(IndexedCell { + cell: Arc::clone(cell), + index: new_hash_index, + }), + ); + + new_hash_index += 1; + next_cells.extend(cell.references.clone()); // Add referenced cells for the next iteration. + } + + current_cells = next_cells; + } + + // Ensure indices are in the correct order based on cell references. + let mut verify_order = true; + while verify_order { + verify_order = false; + + for index_cell in cells_by_hash.values() { + for reference in index_cell.borrow().cell.references.iter() { + let ref_hash = reference.cell_hash(); + if let Some(id_ref) = cells_by_hash.get(&ref_hash) { + if id_ref.borrow().index < index_cell.borrow().index { + id_ref.borrow_mut().index = new_hash_index; + new_hash_index += 1; + verify_order = true; // Reverify if an index was updated. 
+ } + } + } + } + } + + cells_by_hash +} + +fn root_indices( + roots: &[ArcCell], + cells_dict: &HashMap>, +) -> Result, TonCellError> { + roots + .iter() + .map(|root_cell| root_cell.cell_hash()) + .map(|root_cell_hash| { + cells_dict + .get(&root_cell_hash) + .map(|index_record| index_record.borrow().index) + .ok_or_else(|| { + TonCellError::BagOfCellsSerializationError(format!( + "Couldn't find cell with hash {root_cell_hash:?} while searching for roots" + )) + }) + }) + .collect() +} + +fn raw_cells_from_cells( + cells: impl Iterator, + cells_by_hash: &HashMap>, +) -> Result, TonCellError> { + cells + .map(|cell| raw_cell_from_cell(&cell, cells_by_hash)) + .collect() +} + +fn raw_cell_from_cell( + cell: &Cell, + cells_by_hash: &HashMap>, +) -> Result { + raw_cell_reference_indices(cell, cells_by_hash).map(|reference_indices| { + RawCell::new( + cell.data.clone(), + cell.bit_len, + reference_indices, + cell.get_level_mask(), + cell.is_exotic(), + ) + }) +} + +fn raw_cell_reference_indices( + cell: &Cell, + cells_by_hash: &HashMap>, +) -> Result, TonCellError> { + cell.references + .iter() + .map(|cell| { + cells_by_hash + .get(&cell.cell_hash()) + .ok_or_else(|| { + TonCellError::BagOfCellsSerializationError(format!( + "Couldn't find cell with hash {:?} while searching for references", + cell.cell_hash() + )) + }) + .map(|cell| cell.borrow().index) + }) + .collect() +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/slice.rs b/token-core/tcx-libs/tonlib-core/src/cell/slice.rs new file mode 100644 index 00000000..9186e4fa --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/slice.rs @@ -0,0 +1,107 @@ +use std::io::Cursor; +use std::sync::Arc; + +use bitstream_io::{BigEndian, BitRead, BitReader}; + +use crate::cell::util::BitReadExt; +use crate::cell::{ArcCell, Cell, MapTonCellError, TonCellError}; + +#[derive(Debug, Clone, PartialEq)] +pub struct CellSlice { + pub cell: ArcCell, + pub start_bit: usize, + pub end_bit: usize, + pub start_ref: usize, + 
pub end_ref: usize, +} + +impl CellSlice { + pub fn new( + cell: &ArcCell, + start_bit: usize, + end_bit: usize, + start_ref: usize, + end_ref: usize, + ) -> Result { + if end_bit < start_bit || end_bit > cell.bit_len { + return Err(TonCellError::CellParserError(format!( + "Invalid bit offsets: start: {}, end: {}, bit_len: {}", + start_bit, end_bit, cell.bit_len + ))); + } + if end_ref < start_ref || end_ref > cell.references.len() { + return Err(TonCellError::CellParserError(format!( + "Invalid references: start: {}, end: {}, count: {}", + start_bit, + end_bit, + cell.references.len() + ))); + } + Ok(CellSlice { + cell: cell.clone(), + start_bit, + end_bit, + start_ref, + end_ref, + }) + } + + pub fn new_with_offset(cell: &Cell, offset: usize) -> Result { + CellSlice::new( + &Arc::new(cell.clone()), + offset, + cell.bit_len, + 0, + cell.references.len(), + ) + } + + pub fn full_cell(cell: Cell) -> Result { + let bit_len = cell.bit_len; + let ref_count = cell.references.len(); + Ok(CellSlice { + cell: Arc::new(cell), + start_bit: 0, + end_bit: bit_len, + start_ref: 0, + end_ref: ref_count, + }) + } + + pub fn reference(&self, idx: usize) -> Result<&ArcCell, TonCellError> { + if idx > self.end_ref - self.start_ref { + return Err(TonCellError::InvalidIndex { + idx, + ref_count: self.end_ref - self.start_ref, + }); + } + self.cell + .references + .get(self.start_ref + idx) + .ok_or(TonCellError::InvalidIndex { + idx, + ref_count: self.end_ref - self.start_ref, + }) + } + + /// Converts the slice to full `Cell` dropping references to original cell. 
+ pub fn into_cell(&self) -> Result { + let bit_len = self.end_bit - self.start_bit; + let total_bytes = (bit_len + 7) / 8; + let mut data = vec![0u8; total_bytes]; + let cursor = Cursor::new(&self.cell.data); + let mut bit_reader: BitReader>, BigEndian> = + BitReader::endian(cursor, BigEndian); + bit_reader + .skip(self.start_bit as u32) + .map_cell_parser_error()?; + bit_reader.read_bits(bit_len, data.as_mut_slice())?; + + Cell::new( + data, + bit_len, + self.cell.references[self.start_ref..self.end_ref].to_vec(), + false, + ) + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/state_init.rs b/token-core/tcx-libs/tonlib-core/src/cell/state_init.rs new file mode 100644 index 00000000..c2546b91 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/state_init.rs @@ -0,0 +1,129 @@ +use super::ArcCell; +use crate::cell::{Cell, CellBuilder, TonCellError}; +use crate::TonHash; + +pub struct StateInitBuilder { + code: Option, + data: Option, + split_depth: bool, + tick_tock: bool, + library: bool, +} +pub struct StateInit { + pub code: Option, + pub data: Option, +} + +impl StateInitBuilder { + pub fn new(code: &ArcCell, data: &ArcCell) -> StateInitBuilder { + StateInitBuilder { + code: Some(code.clone()), + data: Some(data.clone()), + split_depth: false, + tick_tock: false, + library: false, + } + } + + pub fn with_split_depth(&mut self, split_depth: bool) -> &mut Self { + self.split_depth = split_depth; + self + } + + pub fn with_tick_tock(&mut self, tick_tock: bool) -> &mut Self { + self.tick_tock = tick_tock; + self + } + + pub fn with_library(&mut self, library: bool) -> &mut Self { + self.library = library; + self + } + + pub fn build(&self) -> Result { + let mut builder = CellBuilder::new(); + builder + .store_bit(self.split_depth)? //Split depth + .store_bit(self.tick_tock)? //Tick tock + .store_bit(self.code.is_some())? //Code + .store_bit(self.data.is_some())? 
//Data + .store_bit(self.library)?; //Library + if let Some(code) = &self.code { + builder.store_reference(code)?; + } + if let Some(data) = &self.data { + builder.store_reference(data)?; + } + builder.build() + } +} + +impl StateInit { + pub fn create_account_id(code: &ArcCell, data: &ArcCell) -> Result { + Ok(StateInitBuilder::new(code, data) + .with_library(false) + .build()? + .cell_hash()) + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::StateInitBuilder; + use crate::cell::{CellBuilder, TonCellError}; + + #[test] + fn test_state_init() -> Result<(), TonCellError> { + let code = Arc::new(CellBuilder::new().store_string("code")?.build()?); + let data = Arc::new(CellBuilder::new().store_string("data")?.build()?); + let state_init = StateInitBuilder::new(&code, &data) + .with_split_depth(true) + .with_tick_tock(true) + .with_library(true) + .build()?; + + assert_eq!(state_init.data[0], 0b11111000); + println!("{:08b}", state_init.data[0]); + + let code = Arc::new(CellBuilder::new().store_string("code")?.build()?); + let data = Arc::new(CellBuilder::new().store_string("data")?.build()?); + let state_init = StateInitBuilder::new(&code, &data) + .with_split_depth(false) + .with_tick_tock(false) + .with_library(false) + .build()?; + + assert_eq!(state_init.data[0], 0b00110000); + + let code = Arc::new(CellBuilder::new().store_string("code")?.build()?); + let data = Arc::new(CellBuilder::new().store_string("data")?.build()?); + let state_init = StateInitBuilder::new(&code, &data) + .with_split_depth(true) + .with_tick_tock(false) + .with_library(false) + .build()?; + + assert_eq!(state_init.data[0], 0b10110000); + + let code = Arc::new(CellBuilder::new().store_string("code")?.build()?); + let data = Arc::new(CellBuilder::new().store_string("data")?.build()?); + let state_init = StateInitBuilder::new(&code, &data) + .with_split_depth(false) + .with_tick_tock(true) + .with_library(false) + .build()?; + assert_eq!(state_init.data[0], 0b01110000); 
+ + let code = Arc::new(CellBuilder::new().store_string("code")?.build()?); + let data = Arc::new(CellBuilder::new().store_string("data")?.build()?); + let state_init = StateInitBuilder::new(&code, &data) + .with_split_depth(false) + .with_tick_tock(false) + .with_library(true) + .build()?; + assert_eq!(state_init.data[0], 0b00111000); + Ok(()) + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/cell/util.rs b/token-core/tcx-libs/tonlib-core/src/cell/util.rs new file mode 100644 index 00000000..3396eafa --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/cell/util.rs @@ -0,0 +1,34 @@ +use std::io; + +use bitstream_io::{BitRead, BitReader, Endianness}; + +use crate::cell::{MapTonCellError, TonCellError}; + +pub trait BitReadExt { + fn read_bits(&mut self, num_bits: usize, slice: &mut [u8]) -> Result<(), TonCellError>; +} + +impl BitReadExt for BitReader { + fn read_bits(&mut self, num_bits: usize, slice: &mut [u8]) -> Result<(), TonCellError> { + let total_bytes = (num_bits + 7) / 8; + if total_bytes > slice.len() { + let msg = format!( + "Attempt to read {} bits into buffer {} bytes", + num_bits, + slice.len() + ); + return Err(TonCellError::CellParserError(msg)); + } + let full_bytes = (num_bits) / 8; + self.read_bytes(&mut slice[0..full_bytes]) + .map_cell_parser_error()?; + let last_byte_len = num_bits % 8; + if last_byte_len != 0 { + let last_byte = self + .read::(last_byte_len as u32) + .map_cell_parser_error()?; + slice[full_bytes] = last_byte << (8 - last_byte_len); + } + Ok(()) + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/lib.rs b/token-core/tcx-libs/tonlib-core/src/lib.rs new file mode 100644 index 00000000..4468d8e6 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/lib.rs @@ -0,0 +1,10 @@ +pub mod cell; +// pub mod constants; +// pub mod message; +// pub mod mnemonic; +pub mod types; +// pub mod wallet; + +pub use crate::types::{ + TonAddress, TonAddressParseError, TonHash, +}; diff --git 
a/token-core/tcx-libs/tonlib-core/src/types.rs b/token-core/tcx-libs/tonlib-core/src/types.rs new file mode 100644 index 00000000..29f2f7f2 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/types.rs @@ -0,0 +1,14 @@ +mod address; +mod error; + +pub use address::*; +pub use error::*; + +pub const TON_HASH_BYTES: usize = 32; +pub const ZERO_HASH: TonHash = [0; 32]; +pub type TonHash = [u8; TON_HASH_BYTES]; + +pub const DEFAULT_CELL_HASH: TonHash = [ + 150, 162, 150, 210, 36, 242, 133, 198, 123, 238, 147, 195, 15, 138, 48, 145, 87, 240, 218, 163, + 93, 197, 184, 126, 65, 11, 120, 99, 10, 9, 207, 199, +]; diff --git a/token-core/tcx-libs/tonlib-core/src/types/address.rs b/token-core/tcx-libs/tonlib-core/src/types/address.rs new file mode 100644 index 00000000..5260d04d --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/types/address.rs @@ -0,0 +1,486 @@ +use std::fmt::{Debug, Display, Formatter}; +use std::str::FromStr; + +use base64::engine::general_purpose::{STANDARD_NO_PAD, URL_SAFE_NO_PAD}; +use base64::Engine; +use crc::Crc; +use lazy_static::lazy_static; +use serde::de::{Error, Visitor}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +use super::{TonAddressParseError, TonHash, TON_HASH_BYTES}; + +lazy_static! 
{ + pub static ref CRC_16_XMODEM: Crc = Crc::::new(&crc::CRC_16_XMODEM); +} + +#[derive(PartialEq, Eq, Clone, Hash)] +pub struct TonAddress { + pub workchain: i32, + pub hash_part: TonHash, +} + +impl TonAddress { + pub const NULL: TonAddress = TonAddress { + workchain: 0, + hash_part: [0; TON_HASH_BYTES], + }; + + pub fn new(workchain: i32, hash_part: &TonHash) -> TonAddress { + TonAddress { + workchain, + hash_part: *hash_part, + } + } + + pub fn null() -> TonAddress { + TonAddress::NULL.clone() + } + + pub fn from_hex_str(s: &str) -> Result { + let parts: Vec<&str> = s.split(':').collect(); + + if parts.len() != 2 { + return Err(TonAddressParseError::new( + s, + "Invalid hex address string: wrong address format", + )); + } + + let maybe_wc = parts[0].parse::(); + let wc = match maybe_wc { + Ok(wc) => wc, + Err(_) => { + return Err(TonAddressParseError::new( + s, + "Invalid hex address string: parse int error", + )) + } + }; + + let maybe_decoded_hash_part = hex::decode(parts[1]); + let decoded_hash_part = match maybe_decoded_hash_part { + Ok(decoded_hash_part) => decoded_hash_part, + Err(_) => { + return Err(TonAddressParseError::new( + s, + "Invalid hex address string: base64 decode error", + )) + } + }; + + let maybe_hash_part = decoded_hash_part.as_slice().try_into(); + let hash_part = match maybe_hash_part { + Ok(hash_part) => hash_part, + Err(_) => { + return Err(TonAddressParseError::new( + s, + "Invalid hex address string: unexpected error", + )) + } + }; + + let addr = TonAddress::new(wc, &hash_part); + Ok(addr) + } + + pub fn from_base64_url(s: &str) -> Result { + Ok(Self::from_base64_url_flags(s)?.0) + } + + /// Parses url-safe base64 representation of an address + /// + /// # Returns + /// the address, non-bounceable flag, non-production flag. 
+ pub fn from_base64_url_flags( + s: &str, + ) -> Result<(TonAddress, bool, bool), TonAddressParseError> { + if s.len() != 48 { + return Err(TonAddressParseError::new( + s, + "Invalid base64url address: Wrong length", + )); + } + let maybe_bytes = URL_SAFE_NO_PAD.decode(s); + let bytes = match maybe_bytes { + Ok(bytes) => bytes, + Err(_) => { + return Err(TonAddressParseError::new( + s, + "Invalid base64url address: Base64 decode error", + )) + } + }; + let maybe_slice = bytes.as_slice().try_into(); + let slice = match maybe_slice { + Ok(slice) => slice, + Err(_) => { + return Err(TonAddressParseError::new( + s, + "Invalid base64url address: Unexpected error", + )) + } + }; + + Self::from_base64_src(slice, s) + } + + pub fn from_base64_std(s: &str) -> Result { + Ok(Self::from_base64_std_flags(s)?.0) + } + + /// Parses standard base64 representation of an address + /// + /// # Returns + /// the address, non-bounceable flag, non-production flag. + pub fn from_base64_std_flags( + s: &str, + ) -> Result<(TonAddress, bool, bool), TonAddressParseError> { + if s.len() != 48 { + return Err(TonAddressParseError::new( + s, + "Invalid base64std address: Invalid length", + )); + } + + let maybe_vec = STANDARD_NO_PAD.decode(s); + let vec = match maybe_vec { + Ok(bytes) => bytes, + Err(_) => { + return Err(TonAddressParseError::new( + s, + "Invalid base64std address: Base64 decode error", + )) + } + }; + let maybe_bytes = vec.as_slice().try_into(); + let bytes = match maybe_bytes { + Ok(b) => b, + Err(_) => { + return Err(TonAddressParseError::new( + s, + "Invalid base64std: Unexpected error", + )) + } + }; + + Self::from_base64_src(bytes, s) + } + + /// Parses decoded base64 representation of an address + /// + /// # Returns + /// the address, non-bounceable flag, non-production flag. 
+ fn from_base64_src( + bytes: &[u8; 36], + src: &str, + ) -> Result<(TonAddress, bool, bool), TonAddressParseError> { + let (non_production, non_bounceable) = match bytes[0] { + 0x11 => (false, false), + 0x51 => (false, true), + 0x91 => (true, false), + 0xD1 => (true, true), + _ => { + return Err(TonAddressParseError::new( + src, + "Invalid base64src address: Wrong tag byte", + )) + } + }; + let workchain = bytes[1] as i8 as i32; + let calc_crc = CRC_16_XMODEM.checksum(&bytes[0..34]); + let addr_crc = ((bytes[34] as u16) << 8) | bytes[35] as u16; + if calc_crc != addr_crc { + return Err(TonAddressParseError::new( + src, + "Invalid base64src address: CRC mismatch", + )); + } + let mut hash_part = [0_u8; 32]; + hash_part.clone_from_slice(&bytes[2..34]); + let addr = TonAddress { + workchain, + hash_part, + }; + Ok((addr, non_bounceable, non_production)) + } + + pub fn to_hex(&self) -> String { + format!("{}:{}", self.workchain, hex::encode(self.hash_part)) + } + + pub fn to_base64_url(&self) -> String { + self.to_base64_url_flags(false, false) + } + + pub fn to_base64_url_flags(&self, non_bounceable: bool, non_production: bool) -> String { + let mut buf: [u8; 36] = [0; 36]; + self.to_base64_src(&mut buf, non_bounceable, non_production); + URL_SAFE_NO_PAD.encode(buf) + } + + pub fn to_base64_std(&self) -> String { + self.to_base64_std_flags(false, false) + } + + pub fn to_base64_std_flags(&self, non_bounceable: bool, non_production: bool) -> String { + let mut buf: [u8; 36] = [0; 36]; + self.to_base64_src(&mut buf, non_bounceable, non_production); + STANDARD_NO_PAD.encode(buf) + } + + fn to_base64_src(&self, bytes: &mut [u8; 36], non_bounceable: bool, non_production: bool) { + let tag: u8 = match (non_production, non_bounceable) { + (false, false) => 0x11, + (false, true) => 0x51, + (true, false) => 0x91, + (true, true) => 0xD1, + }; + bytes[0] = tag; + bytes[1] = (self.workchain & 0xff) as u8; + bytes[2..34].clone_from_slice(&self.hash_part); + let crc = 
CRC_16_XMODEM.checksum(&bytes[0..34]); + bytes[34] = ((crc >> 8) & 0xff) as u8; + bytes[35] = (crc & 0xff) as u8; + } +} + +impl Display for TonAddress { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(self.to_base64_url().as_str()) + } +} + +impl Debug for TonAddress { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(self.to_base64_url().as_str()) + } +} + +impl FromStr for TonAddress { + type Err = TonAddressParseError; + + fn from_str(s: &str) -> Result { + if s.len() == 48 { + // Some form of base64 address, check which one + if s.contains('-') || s.contains('_') { + TonAddress::from_base64_url(s) + } else { + TonAddress::from_base64_std(s) + } + } else { + TonAddress::from_hex_str(s) + } + } +} + +impl TryFrom for TonAddress { + type Error = TonAddressParseError; + + fn try_from(value: String) -> Result { + Self::from_str(value.as_str()) + } +} + +impl Serialize for TonAddress { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(self.to_base64_url().as_str()) + } +} + +struct TonAddressVisitor; + +impl<'de> Visitor<'de> for TonAddressVisitor { + type Value = TonAddress; + + fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { + formatter.write_str("an string representing TON address in Hex or Base64 format") + } + + fn visit_str(self, v: &str) -> Result + where + E: Error, + { + v.parse().map_err(E::custom) + } +} + +impl<'de> Deserialize<'de> for TonAddress { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_str(TonAddressVisitor) + } +} + +#[cfg(test)] +mod tests { + + use serde_json::Value; + + use super::TonAddressParseError; + use crate::{TonAddress, TonHash}; + + #[test] + fn format_works() -> Result<(), TonAddressParseError> { + let bytes: TonHash = + hex::decode("e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76") + .unwrap() + .as_slice() + .try_into() + 
.unwrap(); + let addr = TonAddress::new(0, &bytes); + assert_eq!( + addr.to_hex(), + "0:e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76" + ); + assert_eq!( + addr.to_base64_url(), + "EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjrR" + ); + assert_eq!( + addr.to_base64_std(), + "EQDk2VTvn04SUKJrW7rXahzdF8/Qi6utb0wj43InCu9vdjrR" + ); + Ok(()) + } + + #[test] + fn parse_format_works() -> Result<(), TonAddressParseError> { + let bytes: TonHash = + hex::decode("e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76") + .unwrap() + .as_slice() + .try_into() + .unwrap(); + let addr = TonAddress::new(0, &bytes); + assert_eq!( + TonAddress::from_hex_str( + "0:e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76" + )?, + addr + ); + assert_eq!( + TonAddress::from_base64_url("EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjrR")?, + addr + ); + assert_eq!( + TonAddress::from_base64_std("EQDk2VTvn04SUKJrW7rXahzdF8/Qi6utb0wj43InCu9vdjrR")?, + addr + ); + Ok(()) + } + + #[test] + fn parse_works() -> Result<(), TonAddressParseError> { + let bytes: TonHash = + hex::decode("e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76") + .unwrap() + .as_slice() + .try_into() + .unwrap(); + let addr = TonAddress::new(0, &bytes); + assert_eq!( + "0:e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76" + .parse::()?, + addr + ); + assert_eq!( + "EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjrR".parse::()?, + addr + ); + assert_eq!( + "EQDk2VTvn04SUKJrW7rXahzdF8/Qi6utb0wj43InCu9vdjrR".parse::()?, + addr + ); + Ok(()) + } + + #[test] + fn try_from_works() -> Result<(), TonAddressParseError> { + let bytes: TonHash = + hex::decode("e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76") + .unwrap() + .as_slice() + .try_into() + .unwrap(); + let addr = TonAddress::new(0, &bytes); + let res: TonAddress = "EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjrR" + .to_string() + .try_into()?; + assert_eq!(res, addr); + Ok(()) + } + + 
#[test] + fn parse_verifies_crc() -> Result<(), TonAddressParseError> { + let res = "EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjra".parse::(); + assert!(res.is_err()); + Ok(()) + } + + #[test] + fn serialization_works() -> Result<(), TonAddressParseError> { + let expected = "\"EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjrR\""; + + let res = "EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjrR".parse::()?; + let serial = serde_json::to_string(&res).unwrap(); + println!("{}", serial); + assert_eq!(serial.as_str(), expected); + + let res = "0:e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76" + .parse::()?; + let serial = serde_json::to_string(&res).unwrap(); + println!("{}", serial); + assert_eq!(serial.as_str(), expected); + + let res = "EQDk2VTvn04SUKJrW7rXahzdF8/Qi6utb0wj43InCu9vdjrR".parse::()?; + let serial = serde_json::to_string(&res).unwrap(); + println!("{}", serial); + assert_eq!(serial.as_str(), expected); + + Ok(()) + } + + #[test] + fn deserialization_works() -> Result<(), TonAddressParseError> { + let address = "EQDk2VTvn04SUKJrW7rXahzdF8_Qi6utb0wj43InCu9vdjrR"; + let a = format!("\"{}\"", address); + let deserial: TonAddress = serde_json::from_str(a.as_str()).unwrap(); + let expected = address.parse()?; + println!("{}", deserial); + assert_eq!(deserial, expected); + + let address = "EQDk2VTvn04SUKJrW7rXahzdF8/Qi6utb0wj43InCu9vdjrR"; + let a = format!("\"{}\"", address); + let deserial: TonAddress = serde_json::from_str(a.as_str()).unwrap(); + let expected = address.parse()?; + println!("{}", deserial); + assert_eq!(deserial, expected); + + let address = "0:e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76"; + let a = format!("\"{}\"", address); + let deserial: TonAddress = serde_json::from_str(a.as_str()).unwrap(); + let expected = address.parse()?; + println!("{}", deserial); + assert_eq!(deserial, expected); + + let address = + String::from("0:e4d954ef9f4e1250a26b5bbad76a1cdd17cfd08babad6f4c23e372270aef6f76"); + let 
deserial: TonAddress = serde_json::from_value(Value::String(address.clone())).unwrap(); + let expected = address.clone().parse()?; + println!("{}", deserial); + assert_eq!(deserial, expected); + + let address = "124"; + let a = format!("\"{}\"", address); + let deserial: serde_json::Result = serde_json::from_str(a.as_str()); + assert!(deserial.is_err()); + + Ok(()) + } +} diff --git a/token-core/tcx-libs/tonlib-core/src/types/error.rs b/token-core/tcx-libs/tonlib-core/src/types/error.rs new file mode 100644 index 00000000..17b50dc0 --- /dev/null +++ b/token-core/tcx-libs/tonlib-core/src/types/error.rs @@ -0,0 +1,17 @@ +use thiserror::Error; + +#[derive(Error, Debug)] +#[error("Invalid address (Address: {address}, message: {message})")] +pub struct TonAddressParseError { + address: String, + message: String, +} + +impl TonAddressParseError { + pub fn new(address: A, message: M) -> TonAddressParseError { + TonAddressParseError { + address: address.to_string(), + message: message.to_string(), + } + } +}