diff --git a/.gitignore b/.gitignore index 144f0af73c..c6be219244 100644 --- a/.gitignore +++ b/.gitignore @@ -15,3 +15,4 @@ output.txt **/katana-logs crates/benches/bench_results.txt **/generated +.vscode diff --git a/.gitmodules b/.gitmodules index 2ec82617d0..4882156aee 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ -[submodule "crates/katana/core/contracts/messaging/solidity/lib/forge-std"] - path = crates/katana/core/contracts/messaging/solidity/lib/forge-std +[submodule "crates/katana/primitives/contracts/messaging/solidity/lib/forge-std"] + path = crates/katana/primitives/contracts/messaging/solidity/lib/forge-std url = https://github.com/foundry-rs/forge-std diff --git a/Cargo.lock b/Cargo.lock index a994c185e1..ba897b4e69 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -570,7 +570,7 @@ dependencies = [ "futures-util", "handlebars", "http", - "indexmap 2.2.2", + "indexmap 2.2.5", "mime", "multer", "num-traits 0.2.17", @@ -621,7 +621,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "323a5143f5bdd2030f45e3f2e0c821c9b1d36e79cf382129c64299c50a7f3750" dependencies = [ "bytes", - "indexmap 2.2.2", + "indexmap 2.2.5", "serde", "serde_json", ] @@ -957,7 +957,7 @@ dependencies = [ [[package]] name = "benches" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "clap", @@ -975,6 +975,7 @@ dependencies = [ "serde", "serde_json", "sozo", + "sozo-ops", "starknet 0.9.0", "tokio", ] @@ -1125,7 +1126,7 @@ dependencies = [ "cairo-vm", "ctor", "derive_more", - "indexmap 2.2.2", + "indexmap 2.2.5", "itertools 0.10.5", "keccak", "log", @@ -2012,7 +2013,7 @@ checksum = "12d0939f42d40fb1d975cae073d7d4f82d83de4ba2149293115525245425f909" dependencies = [ "env_logger", "hashbrown 0.14.3", - "indexmap 2.2.2", + "indexmap 2.2.5", "itertools 0.11.0", "log", "num-bigint", @@ -2224,6 +2225,12 @@ dependencies = [ "thiserror", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cexpr" version = "0.6.0" @@ -2478,9 +2485,19 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "common" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "reqwest", @@ -3209,7 +3226,7 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = "dojo-bindgen" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "async-trait", "cainome 0.1.5", @@ -3224,15 +3241,15 @@ dependencies = [ [[package]] name = "dojo-core" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" [[package]] name = "dojo-examples-spawn-and-move" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" [[package]] name = "dojo-lang" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3282,7 +3299,7 @@ dependencies = [ [[package]] name = "dojo-language-server" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3304,7 +3321,7 @@ dependencies = [ [[package]] name = "dojo-test-utils" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "assert_fs", @@ -3337,7 +3354,7 @@ 
dependencies = [ [[package]] name = "dojo-types" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "crypto-bigint", "hex", @@ -3352,7 +3369,7 @@ dependencies = [ [[package]] name = "dojo-world" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "assert_fs", @@ -3387,7 +3404,7 @@ dependencies = [ [[package]] name = "dojo-world-abigen" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "cairo-lang-starknet", "camino", @@ -5329,7 +5346,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 2.2.2", + "indexmap 2.2.5", "slab", "tokio", "tokio-util", @@ -5929,9 +5946,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.2" +version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "824b2ae422412366ba479e8111fd301f7b5faece8149317bb81925979a53f520" +checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" dependencies = [ "equivalent", "hashbrown 0.14.3", @@ -6189,6 +6206,28 @@ dependencies = [ "libc", ] +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + [[package]] name = "jobserver" version = "0.1.27" @@ -6525,7 +6564,7 @@ dependencies = [ [[package]] name = "katana" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "assert_matches", @@ -6537,7 +6576,7 @@ dependencies = [ "katana-primitives", "katana-rpc", "katana-rpc-api", - "metrics 0.6.0-alpha.5", + "metrics 0.6.0-alpha.7", "metrics-process", "serde_json", "shellexpand", @@ -6550,7 +6589,7 @@ dependencies = [ [[package]] name = "katana-codecs" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "bytes", "katana-primitives", @@ -6558,7 +6597,7 @@ dependencies = [ [[package]] name = "katana-codecs-derive" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "proc-macro2", "quote", @@ -6568,7 +6607,7 @@ dependencies = [ [[package]] name = "katana-core" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "assert_matches", @@ -6604,7 +6643,7 @@ dependencies = [ [[package]] name = "katana-db" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "blockifier", @@ -6626,10 +6665,11 @@ dependencies = [ [[package]] name = "katana-executor" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "blockifier", + "cairo-vm", "convert_case 0.6.0", "futures", "katana-primitives", @@ -6643,7 +6683,7 @@ dependencies = [ [[package]] name = "katana-primitives" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "base64 0.21.7", @@ -6669,7 +6709,7 @@ dependencies = [ [[package]] name = "katana-provider" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "auto_impl", @@ -6694,7 +6734,7 @@ dependencies = [ [[package]] name = "katana-rpc" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "assert_matches", @@ -6728,7 +6768,7 @@ dependencies = [ [[package]] name = "katana-rpc-api" -version = "0.6.0-alpha.5" 
+version = "0.6.0-alpha.7" dependencies = [ "jsonrpsee 0.16.3", "katana-core", @@ -6739,7 +6779,7 @@ dependencies = [ [[package]] name = "katana-rpc-types" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "derive_more", @@ -6758,7 +6798,7 @@ dependencies = [ [[package]] name = "katana-rpc-types-builder" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "katana-executor", @@ -6770,7 +6810,7 @@ dependencies = [ [[package]] name = "katana-runner" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "chrono", @@ -6789,7 +6829,7 @@ dependencies = [ [[package]] name = "katana-tasks" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "futures", "rayon", @@ -7587,6 +7627,15 @@ dependencies = [ "libc", ] +[[package]] +name = "malloc_buf" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb" +dependencies = [ + "libc", +] + [[package]] name = "match_cfg" version = "0.1.0" @@ -7653,7 +7702,7 @@ dependencies = [ [[package]] name = "metrics" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "hyper", @@ -7986,6 +8035,12 @@ dependencies = [ "rawpointer", ] +[[package]] +name = "ndk-context" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" + [[package]] name = "netlink-packet-core" version = "0.4.2" @@ -8343,6 +8398,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" +[[package]] +name = "objc" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" +dependencies = [ + "malloc_buf", +] + [[package]] name = "object" version = "0.32.2" @@ -8763,7 +8827,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.2.2", + "indexmap 2.2.5", ] [[package]] @@ -9527,6 +9591,12 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "raw-window-handle" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2ff9a1f06a88b01621b7ae906ef0211290d1c8a168a15542486a8f61c0833b9" + [[package]] name = "rawpointer" version = "0.2.1" @@ -9726,7 +9796,7 @@ dependencies = [ "bitflags 2.4.2", "byteorder", "derive_more", - "indexmap 2.2.2", + "indexmap 2.2.5", "libc", "parking_lot 0.12.1", "reth-mdbx-sys", @@ -9954,7 +10024,7 @@ checksum = "e666a5496a0b2186dbcd0ff6106e29e093c15591bde62c20d3842007c6978a09" [[package]] name = "runner-macro" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "quote", "syn 2.0.48", @@ -10181,7 +10251,7 @@ dependencies = [ [[package]] name = "saya" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "clap", @@ -10200,7 +10270,7 @@ dependencies = [ [[package]] name = "saya-core" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "async-trait", @@ -10605,10 +10675,11 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.113" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ + "indexmap 2.2.5", "itoa", "ryu", "serde", @@ -11002,11 +11073,62 @@ dependencies = [ [[package]] name = "sozo" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" +dependencies = [ + "anyhow", + "assert_fs", + "async-trait", + "cainome 0.1.5", + "cairo-lang-compiler", + "cairo-lang-defs", + "cairo-lang-filesystem", + "cairo-lang-plugins", + "cairo-lang-project", + "cairo-lang-sierra", + "cairo-lang-sierra-to-casm", + "cairo-lang-starknet", + "cairo-lang-test-plugin", + "cairo-lang-test-runner", + "cairo-lang-utils", + "camino", + "clap", + "clap-verbosity-flag", + "clap_complete", + "console", + "dojo-bindgen", + "dojo-lang", + "dojo-test-utils", + "dojo-types", + "dojo-world", + "futures", + "katana-runner", + "notify", + "notify-debouncer-mini", + "scarb", + "scarb-ui", + "semver 1.0.21", + "serde", + "serde_json", + "smol_str", + "snapbox", + "sozo-ops", + "starknet 0.9.0", + "starknet-crypto 0.6.1", + "thiserror", + "tokio", + "tracing", + "tracing-log 0.1.4", + "url", +] + +[[package]] +name = "sozo-ops" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "assert_fs", "async-trait", + "cainome 0.1.5", "cairo-lang-compiler", "cairo-lang-defs", "cairo-lang-filesystem", @@ -11050,7 +11172,7 @@ dependencies = [ [[package]] name = "sozo-signers" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "starknet 0.9.0", @@ -11141,7 +11263,7 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap 2.2.2", + "indexmap 2.2.5", "log", "memchr", "once_cell", @@ -11613,7 +11735,7 @@ dependencies = [ "cairo-lang-starknet", "derive_more", "hex", - "indexmap 2.2.2", + "indexmap 2.2.5", "once_cell", "primitive-types", "serde", @@ -12184,7 +12306,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.5", "serde", "serde_spanned", "toml_datetime", @@ -12197,7 +12319,7 @@ version = "0.20.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.5", "toml_datetime", "winnow", ] @@ -12208,7 +12330,7 @@ version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.5", "toml_datetime", "winnow", ] @@ -12219,7 +12341,7 @@ version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c9ffdf896f8daaabf9b66ba8e77ea1ed5ed0f72821b398aba62352e95062951" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.5", "serde", "serde_spanned", "toml_datetime", @@ -12361,7 +12483,7 @@ dependencies = [ [[package]] name = "torii" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "async-trait", @@ -12381,7 +12503,7 @@ dependencies = [ "hyper-reverse-proxy", "indexmap 1.9.3", "lazy_static", - "metrics 0.6.0-alpha.5", + "metrics 0.6.0-alpha.7", "metrics-process", "scarb", "serde", @@ -12402,11 +12524,12 @@ dependencies = [ "tracing", "tracing-subscriber", "url", + "webbrowser", ] [[package]] name = "torii-client" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "async-trait", "camino", @@ 
-12434,7 +12557,7 @@ dependencies = [ [[package]] name = "torii-core" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "async-trait", @@ -12458,6 +12581,7 @@ dependencies = [ "serde_json", "slab", "sozo", + "sozo-ops", "sqlx", "starknet 0.9.0", "starknet-crypto 0.6.1", @@ -12470,7 +12594,7 @@ dependencies = [ [[package]] name = "torii-graphql" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "async-graphql", @@ -12485,12 +12609,14 @@ dependencies = [ "dojo-types", "dojo-world", "lazy_static", + "regex", "scarb", "scarb-ui", "serde", "serde_json", "serial_test", "sozo", + "sozo-ops", "sqlx", "starknet 0.9.0", "starknet-crypto 0.6.1", @@ -12509,7 +12635,7 @@ dependencies = [ [[package]] name = "torii-grpc" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "bytes", "crypto-bigint", @@ -12548,11 +12674,15 @@ dependencies = [ [[package]] name = "torii-relay" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "async-trait", + "crypto-bigint", + "dojo-types", + "dojo-world", "futures", + "indexmap 2.2.5", "libp2p", "libp2p-webrtc", "libp2p-webrtc-websys", @@ -12560,9 +12690,14 @@ dependencies = [ "regex", "serde", "serde_json", + "sqlx", + "starknet-core 0.9.0", + "starknet-crypto 0.6.1", + "starknet-ff", "tempfile", "thiserror", "tokio", + "torii-core", "tracing", "tracing-subscriber", "tracing-wasm", @@ -12573,7 +12708,7 @@ dependencies = [ [[package]] name = "torii-server" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" dependencies = [ "anyhow", "async-trait", @@ -12592,7 +12727,7 @@ dependencies = [ "hyper-reverse-proxy", "indexmap 1.9.3", "lazy_static", - "metrics 0.6.0-alpha.5", + "metrics 0.6.0-alpha.7", "metrics-process", "scarb", "serde", @@ -12890,7 +13025,7 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "types-test" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" [[package]] name = "ucd-trie" @@ -13329,6 +13464,23 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webbrowser" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b2391658b02c27719fc5a0a73d6e696285138e8b12fba9d4baa70451023c71" +dependencies = [ + "core-foundation", + "home", + "jni", + "log", + "ndk-context", + "objc", + "raw-window-handle", + "url", + "web-sys", +] + [[package]] name = "webpki" version = "0.22.4" @@ -13657,6 +13809,15 @@ dependencies = [ "windows-targets 0.52.0", ] +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + [[package]] name = "windows-sys" version = "0.48.0" @@ -13675,6 +13836,21 @@ dependencies = [ "windows-targets 0.52.0", ] +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -13705,6 +13881,12 @@ dependencies = [ "windows_x86_64_msvc 0.52.0", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -13717,6 +13899,12 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -13729,6 +13917,12 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -13741,6 +13935,12 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -13753,6 +13953,12 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -13765,6 +13971,12 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -13777,6 +13989,12 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" diff --git a/Cargo.toml b/Cargo.toml index 7176669031..bc5f44e2e9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,7 @@ edition = "2021" license = "Apache-2.0" license-file = "LICENSE" repository = "https://github.com/dojoengine/dojo/" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" [profile.performance] codegen-units = 1 @@ -92,6 +92,7 @@ saya-core = { path = "crates/saya/core" } # sozo sozo-signers = { path = "crates/sozo/signers" } +sozo-ops = { path = "crates/sozo/ops" } anyhow = "1.0.75" assert_matches = "1.5.0" @@ -162,6 +163,7 @@ tokio = { version = "1.32.0", features = [ "full" ] 
} toml = "0.7.4" tracing = "0.1.34" tracing-subscriber = { version = "0.3.16", features = [ "env-filter", "json" ] } +regex = "1.10.3" url = { version = "2.4.0", features = [ "serde" ] } # server diff --git a/bin/sozo/Cargo.toml b/bin/sozo/Cargo.toml index 0bfb305818..fecb6807cc 100644 --- a/bin/sozo/Cargo.toml +++ b/bin/sozo/Cargo.toml @@ -44,6 +44,9 @@ tokio.workspace = true tracing-log = "0.1.3" tracing.workspace = true url.workspace = true +sozo-ops.workspace = true + +cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.2.2" } [dev-dependencies] assert_fs = "1.0.10" diff --git a/bin/sozo/src/args.rs b/bin/sozo/src/args.rs index 909c7ba10d..fc3c7c5f5b 100644 --- a/bin/sozo/src/args.rs +++ b/bin/sozo/src/args.rs @@ -9,6 +9,7 @@ use tracing_log::AsTrace; use crate::commands::auth::AuthArgs; use crate::commands::build::BuildArgs; +use crate::commands::clean::CleanArgs; use crate::commands::completions::CompletionsArgs; use crate::commands::dev::DevArgs; use crate::commands::events::EventsArgs; @@ -54,6 +55,8 @@ pub enum Commands { Build(BuildArgs), #[command(about = "Initialize a new project")] Init(InitArgs), + #[command(about = "Remove generated artifacts, manifests and abis")] + Clean(CleanArgs), #[command(about = "Run a migration, declaring and deploying contracts as necessary to \ update the world")] Migrate(Box), diff --git a/bin/sozo/src/commands/auth.rs b/bin/sozo/src/commands/auth.rs index a815999667..c07157077d 100644 --- a/bin/sozo/src/commands/auth.rs +++ b/bin/sozo/src/commands/auth.rs @@ -1,17 +1,17 @@ -use std::str::FromStr; - use anyhow::Result; use clap::{Args, Subcommand}; -use dojo_world::contracts::cairo_utils; -use dojo_world::metadata::dojo_metadata_from_workspace; +use dojo_world::contracts::WorldContractReader; +use dojo_world::metadata::Environment; use scarb::core::Config; -use starknet_crypto::FieldElement; +use sozo_ops::auth; +use starknet::accounts::ConnectedAccount; +use starknet::core::types::{BlockId, BlockTag}; use super::options::account::AccountOptions; use super::options::starknet::StarknetOptions; use super::options::transaction::TransactionOptions; use super::options::world::WorldOptions; -use crate::ops::auth; +use crate::utils; #[derive(Debug, Args)] pub struct AuthArgs { @@ -19,80 +19,6 @@ pub struct AuthArgs { pub command: AuthCommand, } -#[derive(Debug, Clone, PartialEq)] -pub struct ModelContract { - pub model: FieldElement, - pub contract: String, -} - -impl FromStr for ModelContract { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - let parts: Vec<&str> = s.split(',').collect(); - - let (model, contract) = match parts.as_slice() { - [model, contract] => (model, contract), - _ => anyhow::bail!( - "Model and contract address are expected to be comma separated: `sozo auth writer \ - model_name,0x1234`" - ), - }; - - let model = cairo_utils::str_to_felt(model) - .map_err(|_| anyhow::anyhow!("Invalid model name: {}", model))?; - - Ok(ModelContract { model, contract: contract.to_string() }) - } -} - -#[derive(Debug, Clone, PartialEq)] -pub enum ResourceType { - Contract(String), - Model(FieldElement), -} - -#[derive(Debug, Clone, PartialEq)] -pub struct OwnerResource { - pub resource: ResourceType, - pub owner: FieldElement, -} - -impl FromStr for OwnerResource { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - let parts: Vec<&str> = s.split(',').collect(); - - let (resource_part, owner_part) = match parts.as_slice() { - [resource, owner] => (*resource, *owner), - _ => anyhow::bail!( - "Owner 
and resource are expected to be comma separated: `sozo auth owner \ - resource_type:resource_name,0x1234`" - ), - }; - - let owner = FieldElement::from_hex_be(owner_part) - .map_err(|_| anyhow::anyhow!("Invalid owner address: {}", owner_part))?; - - let resource_parts = resource_part.split_once(':'); - let resource = match resource_parts { - Some(("contract", name)) => ResourceType::Contract(name.to_string()), - Some(("model", name)) => { - let model = cairo_utils::str_to_felt(name) - .map_err(|_| anyhow::anyhow!("Invalid model name: {}", name))?; - ResourceType::Model(model) - } - _ => anyhow::bail!( - "Resource is expected to be in the format `resource_type:resource_name`: `sozo \ - auth owner 0x1234,resource_type:resource_name`" - ), - }; - - Ok(OwnerResource { owner, resource }) - } -} - #[derive(Debug, Subcommand)] pub enum AuthKind { #[command(about = "Grant a contract permission to write to a model.")] @@ -103,7 +29,7 @@ pub enum AuthKind { #[arg(help = "A list of models and contract address to grant write access to. Comma \ separated values to indicate model name and contract address e.g. \ model_name,path::to::contract model_name,contract_address ")] - models_contracts: Vec, + models_contracts: Vec, }, #[command(about = "Grant ownership of a resource.")] Owner { @@ -114,10 +40,35 @@ pub enum AuthKind { values to indicate owner address and resouce e.g. \ contract:path::to::contract,0x1234 contract:contract_address,0x1111, \ model:model_name,0xbeef")] - owners_resources: Vec, + owners_resources: Vec, }, } +pub async fn grant( + world: WorldOptions, + account: AccountOptions, + starknet: StarknetOptions, + env_metadata: Option, + kind: AuthKind, + transaction: TransactionOptions, +) -> Result<()> { + let world_address = world.world_address.unwrap_or_default(); + let world = + utils::world_from_env_metadata(world, account, starknet, &env_metadata).await.unwrap(); + let provider = world.account.provider(); + let world_reader = WorldContractReader::new(world_address, &provider) + .with_block(BlockId::Tag(BlockTag::Pending)); + + match kind { + AuthKind::Writer { models_contracts } => { + auth::grant_writer(&world, models_contracts, world_reader, transaction.into()).await + } + AuthKind::Owner { owners_resources } => { + auth::grant_owner(world, owners_resources, transaction.into()).await + } + } +} + #[derive(Debug, Subcommand)] pub enum AuthCommand { #[command(about = "Grant an auth role.")] @@ -158,15 +109,14 @@ pub enum AuthCommand { impl AuthArgs { pub fn run(self, config: &Config) -> Result<()> { - let env_metadata = if config.manifest_path().exists() { - let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) - } else { - None - }; - - config.tokio_handle().block_on(auth::execute(self.command, env_metadata)) + let env_metadata = utils::load_metadata_from_config(config)?; + + match self.command { + AuthCommand::Grant { kind, world, starknet, account, transaction } => config + .tokio_handle() + .block_on(grant(world, account, starknet, env_metadata, kind, transaction)), + _ => todo!(), + } } } @@ -174,6 +124,7 @@ impl AuthArgs { mod tests { use std::str::FromStr; + use dojo_world::contracts::cairo_utils; use starknet_crypto::FieldElement; use super::*; @@ -183,23 +134,23 @@ mod tests { // Test valid input let input = "contract:path::to::contract,0x1234"; let expected_owner = FieldElement::from_hex_be("0x1234").unwrap(); - let expected_resource = 
ResourceType::Contract("path::to::contract".to_string()); - let expected = OwnerResource { owner: expected_owner, resource: expected_resource }; - let result = OwnerResource::from_str(input).unwrap(); + let expected_resource = auth::ResourceType::Contract("path::to::contract".to_string()); + let expected = auth::OwnerResource { owner: expected_owner, resource: expected_resource }; + let result = auth::OwnerResource::from_str(input).unwrap(); assert_eq!(result, expected); // Test valid input with model let input = "model:model_name,0x1234"; let expected_owner = FieldElement::from_hex_be("0x1234").unwrap(); let expected_model = cairo_utils::str_to_felt("model_name").unwrap(); - let expected_resource = ResourceType::Model(expected_model); - let expected = OwnerResource { owner: expected_owner, resource: expected_resource }; - let result = OwnerResource::from_str(input).unwrap(); + let expected_resource = auth::ResourceType::Model(expected_model); + let expected = auth::OwnerResource { owner: expected_owner, resource: expected_resource }; + let result = auth::OwnerResource::from_str(input).unwrap(); assert_eq!(result, expected); // Test invalid input let input = "invalid_input"; - let result = OwnerResource::from_str(input); + let result = auth::OwnerResource::from_str(input); assert!(result.is_err()); } @@ -210,13 +161,13 @@ mod tests { let expected_model = cairo_utils::str_to_felt("model_name").unwrap(); let expected_contract = "0x1234"; let expected = - ModelContract { model: expected_model, contract: expected_contract.to_string() }; - let result = ModelContract::from_str(input).unwrap(); + auth::ModelContract { model: expected_model, contract: expected_contract.to_string() }; + let result = auth::ModelContract::from_str(input).unwrap(); assert_eq!(result, expected); // Test invalid input let input = "invalid_input"; - let result = ModelContract::from_str(input); + let result = auth::ModelContract::from_str(input); assert!(result.is_err()); } } diff --git a/bin/sozo/src/commands/clean.rs b/bin/sozo/src/commands/clean.rs new file mode 100644 index 0000000000..0398c545a7 --- /dev/null +++ b/bin/sozo/src/commands/clean.rs @@ -0,0 +1,55 @@ +use std::fs; + +use anyhow::Result; +use camino::Utf8PathBuf; +use clap::Args; +use dojo_lang::compiler::{ABIS_DIR, BASE_DIR, MANIFESTS_DIR}; +use scarb::core::Config; + +#[derive(Debug, Args)] +pub struct CleanArgs { + #[arg(short, long)] + #[arg(help = "Remove manifests and abis only.")] + #[arg(long_help = "Remove manifests and abis only.")] + pub manifests_abis: bool, + + #[arg(short, long)] + #[arg(help = "Remove artifacts only.")] + #[arg(long_help = "Remove artifacts only.")] + pub artifacts: bool, +} + +impl CleanArgs { + pub fn clean_manifests_abis(&self, root_dir: &Utf8PathBuf) -> Result<()> { + let dirs = vec![ + root_dir.join(MANIFESTS_DIR).join(BASE_DIR), + root_dir.join(ABIS_DIR).join(BASE_DIR), + ]; + + for d in dirs { + if d.exists() { + fs::remove_dir_all(d)?; + } + } + + Ok(()) + } + + pub fn run(self, config: &Config) -> Result<()> { + let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; + + let clean_manifests_abis = self.manifests_abis || !self.artifacts; + let clean_artifacts = self.artifacts || !self.manifests_abis; + + if clean_manifests_abis { + let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + self.clean_manifests_abis(&manifest_dir)?; + } + + if clean_artifacts { + scarb::ops::clean(config)?; + } + + Ok(()) + } +} diff --git a/bin/sozo/src/commands/dev.rs b/bin/sozo/src/commands/dev.rs index 
5f873abdc7..c5dc207510 100644 --- a/bin/sozo/src/commands/dev.rs +++ b/bin/sozo/src/commands/dev.rs @@ -10,23 +10,24 @@ use cairo_lang_filesystem::ids::FileId; use clap::Args; use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; use dojo_lang::scarb_internal::build_scarb_root_database; -use dojo_world::manifest::{BaseManifest, DeployedManifest}; +use dojo_world::manifest::{BaseManifest, DeploymentManifest}; use dojo_world::metadata::dojo_metadata_from_workspace; use dojo_world::migration::world::WorldDiff; use notify_debouncer_mini::notify::RecursiveMode; use notify_debouncer_mini::{new_debouncer, DebouncedEvent, DebouncedEventKind}; use scarb::compiler::CompilationUnit; use scarb::core::{Config, Workspace}; +use sozo_ops::migration::{self, prepare_migration}; use starknet::accounts::SingleOwnerAccount; use starknet::core::types::FieldElement; use starknet::providers::Provider; use starknet::signers::Signer; use tracing_log::log; +use super::migrate::setup_env; use super::options::account::AccountOptions; use super::options::starknet::StarknetOptions; use super::options::world::WorldOptions; -use crate::ops::migration; #[derive(Args)] pub struct DevArgs { @@ -113,8 +114,8 @@ async fn migrate( account: &SingleOwnerAccount, name: Option, ws: &Workspace<'_>, - previous_manifest: Option, -) -> Result<(DeployedManifest, Option)> + previous_manifest: Option, +) -> Result<(DeploymentManifest, Option)> where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, @@ -140,14 +141,16 @@ where return Ok((new_manifest.into(), world_address)); } - match migration::apply_diff(ws, &target_dir, diff, name.clone(), world_address, account, None) - .await - { - Ok(address) => { - config - .ui() - .print(format!("🎉 World at address {} updated!", format_args!("{:#x}", address))); - world_address = Some(address); + let ui = ws.config().ui(); + let strategy = prepare_migration(&target_dir, diff, name, world_address, &ui)?; + + match migration::apply_diff(ws, account, None, &strategy).await { + Ok(migration_output) => { + config.ui().print(format!( + "🎉 World at address {} updated!", + format_args!("{:#x}", migration_output.world_address) + )); + world_address = Some(migration_output.world_address); } Err(err) => { config.ui().error(err.to_string()); @@ -208,14 +211,14 @@ impl DevArgs { RecursiveMode::Recursive, )?; let name = self.name.clone(); - let mut previous_manifest: Option = Option::None; + let mut previous_manifest: Option = Option::None; let result = build(&mut context); let Some((mut world_address, account, _)) = context .ws .config() .tokio_handle() - .block_on(migration::setup_env( + .block_on(setup_env( &context.ws, self.account, self.starknet, diff --git a/bin/sozo/src/commands/events.rs b/bin/sozo/src/commands/events.rs index 04297f2a7d..95eb001ac1 100644 --- a/bin/sozo/src/commands/events.rs +++ b/bin/sozo/src/commands/events.rs @@ -1,11 +1,11 @@ use anyhow::Result; use clap::Parser; -use dojo_world::metadata::dojo_metadata_from_workspace; use scarb::core::Config; +use sozo_ops::events; use super::options::starknet::StarknetOptions; use super::options::world::WorldOptions; -use crate::ops::events; +use crate::utils; #[derive(Parser, Debug)] pub struct EventsArgs { @@ -42,16 +42,28 @@ pub struct EventsArgs { impl EventsArgs { pub fn run(self, config: &Config) -> Result<()> { + let env_metadata = utils::load_metadata_from_config(config)?; let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - - let env_metadata = if config.manifest_path().exists() { - 
dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) - } else { - None - }; - let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + let provider = self.starknet.provider(env_metadata.as_ref())?; + + let event_filter = events::get_event_filter( + self.from_block, + self.to_block, + self.events, + self.world.world_address, + ); - config.tokio_handle().block_on(events::execute(self, env_metadata, &manifest_dir)) + config.tokio_handle().block_on(async { + events::parse( + self.chunk_size, + provider, + self.continuation_token, + event_filter, + self.json, + &manifest_dir, + ) + .await + }) } } diff --git a/bin/sozo/src/commands/execute.rs b/bin/sozo/src/commands/execute.rs index 22ccf054de..ed3f1b11c8 100644 --- a/bin/sozo/src/commands/execute.rs +++ b/bin/sozo/src/commands/execute.rs @@ -1,14 +1,14 @@ use anyhow::Result; use clap::Args; -use dojo_world::metadata::dojo_metadata_from_workspace; use scarb::core::Config; +use sozo_ops::execute; use starknet::core::types::FieldElement; use super::options::account::AccountOptions; use super::options::starknet::StarknetOptions; use super::options::transaction::TransactionOptions; use super::options::world::WorldOptions; -use crate::ops::execute; +use crate::utils; #[derive(Debug, Args)] #[command(about = "Execute a system with the given calldata.")] @@ -41,14 +41,20 @@ pub struct ExecuteArgs { impl ExecuteArgs { pub fn run(self, config: &Config) -> Result<()> { - let env_metadata = if config.manifest_path().exists() { - let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) - } else { - None - }; - - config.tokio_handle().block_on(execute::execute(self, env_metadata)) + let env_metadata = utils::load_metadata_from_config(config)?; + + config.tokio_handle().block_on(async { + let world = utils::world_from_env_metadata( + self.world, + self.account, + self.starknet, + &env_metadata, + ) + .await + .unwrap(); + let tx_config = self.transaction.into(); + + execute::execute(self.contract, self.entrypoint, self.calldata, world, tx_config).await + }) } } diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index 674fda0b6d..5690a21936 100644 --- a/bin/sozo/src/commands/migrate.rs +++ b/bin/sozo/src/commands/migrate.rs @@ -1,14 +1,20 @@ -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, Context, Result}; use clap::Args; use dojo_lang::compiler::MANIFESTS_DIR; -use dojo_world::metadata::dojo_metadata_from_workspace; -use scarb::core::Config; +use dojo_world::metadata::{dojo_metadata_from_workspace, Environment}; +use scarb::core::{Config, Workspace}; +use sozo_ops::migration; +use starknet::accounts::{Account, ConnectedAccount, SingleOwnerAccount}; +use starknet::core::types::{BlockId, BlockTag, FieldElement, StarknetError}; +use starknet::core::utils::parse_cairo_short_string; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::{JsonRpcClient, Provider, ProviderError}; +use starknet::signers::LocalWallet; use super::options::account::AccountOptions; use super::options::starknet::StarknetOptions; use super::options::transaction::TransactionOptions; use super::options::world::WorldOptions; -use crate::ops::migration; #[derive(Args)] pub struct MigrateArgs { @@ -35,6 +41,51 @@ pub struct MigrateArgs { pub transaction: TransactionOptions, } +pub async fn setup_env<'a>( + ws: &'a Workspace<'a>, + account: AccountOptions, + starknet: StarknetOptions, + world: WorldOptions, + name: 
Option<&'a String>, + env: Option<&'a Environment>, +) -> Result<( + Option<FieldElement>, + SingleOwnerAccount<JsonRpcClient<HttpTransport>, LocalWallet>, + String, +)> { + let ui = ws.config().ui(); + + let world_address = world.address(env).ok(); + + let (account, chain_id) = { + let provider = starknet.provider(env)?; + let chain_id = provider.chain_id().await?; + let chain_id = parse_cairo_short_string(&chain_id) + .with_context(|| "Cannot parse chain_id as string")?; + + let mut account = account.account(provider, env).await?; + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let address = account.address(); + + ui.print(format!("\nMigration account: {address:#x}")); + if let Some(name) = name { + ui.print(format!("\nWorld name: {name}\n")); + } + + match account.provider().get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await { + Ok(_) => Ok((account, chain_id)), + Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { + Err(anyhow!("Account with address {:#x} doesn't exist.", account.address())) + } + Err(e) => Err(e.into()), + } + } + .with_context(|| "Problem initializing account for migration.")?; + + Ok((world_address, account, chain_id)) +} + impl MigrateArgs { pub fn run(mut self, config: &Config) -> Result<()> { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; @@ -57,8 +108,19 @@ impl MigrateArgs { return Err(anyhow!("Build project using `sozo build` first")); } - ws.config().tokio_handle().block_on(migration::execute(&ws, self, env_metadata))?; + config.tokio_handle().block_on(async { + let (world_address, account, chain_id) = setup_env( + &ws, + self.account, + self.starknet, + self.world, + self.name.as_ref(), + env_metadata.as_ref(), + ) + .await?; - Ok(()) + migration::migrate(&ws, world_address, chain_id, &account, self.name, self.dry_run) + .await + }) } } diff --git a/bin/sozo/src/commands/mod.rs b/bin/sozo/src/commands/mod.rs index 8281d5cc41..fce588da4f 100644 --- a/bin/sozo/src/commands/mod.rs +++ b/bin/sozo/src/commands/mod.rs @@ -5,6 +5,7 @@ use crate::args::Commands; pub(crate) mod auth; pub(crate) mod build; +pub(crate) mod clean; pub(crate) mod completions; pub(crate) mod dev; pub(crate) mod events; @@ -19,6 +20,7 @@ pub(crate) mod test; pub fn run(command: Commands, config: &Config) -> Result<()> { match command { Commands::Init(args) => args.run(config), + Commands::Clean(args) => args.run(config), Commands::Test(args) => args.run(config), Commands::Build(args) => args.run(config), Commands::Migrate(args) => args.run(config), diff --git a/bin/sozo/src/commands/model.rs b/bin/sozo/src/commands/model.rs index b4f00d0055..1c69bbe3fe 100644 --- a/bin/sozo/src/commands/model.rs +++ b/bin/sozo/src/commands/model.rs @@ -1,12 +1,12 @@ use anyhow::Result; use clap::{Args, Subcommand}; -use dojo_world::metadata::dojo_metadata_from_workspace; use scarb::core::Config; +use sozo_ops::model; use starknet::core::types::FieldElement; use super::options::starknet::StarknetOptions; use super::options::world::WorldOptions; -use crate::ops::model; +use crate::utils; #[derive(Debug, Args)] pub struct ModelArgs { @@ -76,14 +76,31 @@ pub enum ModelCommands { impl ModelArgs { pub fn run(self, config: &Config) -> Result<()> { - let env_metadata =
utils::load_metadata_from_config(config)?; + + config.tokio_handle().block_on(async { + match self.command { + ModelCommands::ClassHash { name, starknet, world } => { + let world_address = world.address(env_metadata.as_ref()).unwrap(); + let provider = starknet.provider(env_metadata.as_ref()).unwrap(); + model::model_class_hash(name, world_address, provider).await + } + ModelCommands::ContractAddress { name, starknet, world } => { + let world_address = world.address(env_metadata.as_ref()).unwrap(); + let provider = starknet.provider(env_metadata.as_ref()).unwrap(); + model::model_contract_address(name, world_address, provider).await + } + ModelCommands::Schema { name, to_json, starknet, world } => { + let world_address = world.address(env_metadata.as_ref()).unwrap(); + let provider = starknet.provider(env_metadata.as_ref()).unwrap(); + model::model_schema(name, world_address, provider, to_json).await + } + ModelCommands::Get { name, keys, starknet, world } => { + let world_address = world.address(env_metadata.as_ref()).unwrap(); + let provider = starknet.provider(env_metadata.as_ref()).unwrap(); + model::model_get(name, keys, world_address, provider).await + } + } + }) } } diff --git a/bin/sozo/src/commands/options/transaction.rs b/bin/sozo/src/commands/options/transaction.rs index 23874ac93d..7901c6ca6a 100644 --- a/bin/sozo/src/commands/options/transaction.rs +++ b/bin/sozo/src/commands/options/transaction.rs @@ -13,15 +13,28 @@ pub struct TransactionOptions { pub fee_estimate_multiplier: Option, #[arg(short, long)] - #[arg(help = "Wait until the transaction is accepted by the sequencer, returning the receipt.")] + #[arg(help = "Wait until the transaction is accepted by the sequencer, returning the status \ + and hash.")] #[arg(long_help = "Wait until the transaction is accepted by the sequencer, returning the \ - receipt. This will poll the transaction status until it gets accepted or \ - rejected by the sequencer.")] + status and the hash. This will poll the transaction status until it gets \ + accepted or rejected by the sequencer.")] pub wait: bool, + + #[arg(short, long)] + #[arg( + help = "If --wait is set, returns the full transaction receipt. Otherwise, it is a no-op." + )] + #[arg(long_help = "If --wait is set, returns the full transaction receipt. 
Otherwise, it is \ + a no-op.")] + pub receipt: bool, } impl From for TxConfig { fn from(value: TransactionOptions) -> Self { - Self { fee_estimate_multiplier: value.fee_estimate_multiplier } + Self { + fee_estimate_multiplier: value.fee_estimate_multiplier, + wait: value.wait, + receipt: value.receipt, + } } } diff --git a/bin/sozo/src/commands/register.rs b/bin/sozo/src/commands/register.rs index a1df25a833..eca9d46590 100644 --- a/bin/sozo/src/commands/register.rs +++ b/bin/sozo/src/commands/register.rs @@ -1,14 +1,16 @@ use anyhow::Result; use clap::{Args, Subcommand}; -use dojo_world::metadata::dojo_metadata_from_workspace; +use dojo_world::contracts::WorldContractReader; use scarb::core::Config; -use starknet::core::types::FieldElement; +use sozo_ops::register; +use starknet::accounts::ConnectedAccount; +use starknet::core::types::{BlockId, BlockTag, FieldElement}; use super::options::account::AccountOptions; use super::options::starknet::StarknetOptions; use super::options::transaction::TransactionOptions; use super::options::world::WorldOptions; -use crate::ops::register; +use crate::utils; #[derive(Debug, Args)] pub struct RegisterArgs { @@ -42,14 +44,32 @@ pub enum RegisterCommand { impl RegisterArgs { pub fn run(self, config: &Config) -> Result<()> { - let env_metadata = if config.manifest_path().exists() { - let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; + let env_metadata = utils::load_metadata_from_config(config)?; - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) - } else { - None + let (starknet, world, account, transaction, models) = match self.command { + RegisterCommand::Model { starknet, world, account, transaction, models } => { + (starknet, world, account, transaction, models) + } }; - config.tokio_handle().block_on(register::execute(self.command, env_metadata)) + let world_address = world.world_address.unwrap_or_default(); + + config.tokio_handle().block_on(async { + let world = + utils::world_from_env_metadata(world, account, starknet, &env_metadata).await?; + let provider = world.account.provider(); + let world_reader = WorldContractReader::new(world_address, &provider) + .with_block(BlockId::Tag(BlockTag::Pending)); + + register::model_register( + models, + &world, + transaction.into(), + world_reader, + world_address, + config, + ) + .await + }) } } diff --git a/bin/sozo/src/commands/test.rs b/bin/sozo/src/commands/test.rs index 8aabe211c1..532f583387 100644 --- a/bin/sozo/src/commands/test.rs +++ b/bin/sozo/src/commands/test.rs @@ -65,7 +65,7 @@ impl TestArgs { main_crate_ids.extend(collect_external_crate_ids(&db, external_contracts)); } - if DiagnosticsReporter::stderr().check(&db) { + if DiagnosticsReporter::stderr().allow_warnings().check(&db) { bail!("failed to compile"); } diff --git a/bin/sozo/src/lib.rs b/bin/sozo/src/lib.rs index fc9ec51d87..410ce80685 100644 --- a/bin/sozo/src/lib.rs +++ b/bin/sozo/src/lib.rs @@ -1,3 +1,3 @@ pub mod args; pub mod commands; -pub mod ops; +pub mod utils; diff --git a/bin/sozo/src/main.rs b/bin/sozo/src/main.rs index e0385cf849..060a9d0cf9 100644 --- a/bin/sozo/src/main.rs +++ b/bin/sozo/src/main.rs @@ -3,6 +3,7 @@ use std::process::exit; use std::str::FromStr; use anyhow::Result; +use args::{Commands, SozoArgs}; use camino::Utf8PathBuf; use clap::Parser; use dojo_lang::compiler::DojoCompiler; @@ -11,7 +12,10 @@ use scarb::compiler::CompilerRepository; use scarb::core::{Config, TomlManifest}; use scarb_ui::{OutputFormat, Ui}; use semver::Version; -use sozo::args::{Commands, 
SozoArgs}; + +mod args; +mod commands; +mod utils; fn main() { let args = SozoArgs::parse(); @@ -48,7 +52,7 @@ fn cli_main(args: SozoArgs) -> Result<()> { .compilers(compilers) .build()?; - sozo::commands::run(args.command, &config) + commands::run(args.command, &config) } fn verify_cairo_version_compatibility(manifest_path: &Utf8PathBuf) -> Result<()> { diff --git a/bin/sozo/src/ops/auth.rs b/bin/sozo/src/ops/auth.rs deleted file mode 100644 index 03903d4df2..0000000000 --- a/bin/sozo/src/ops/auth.rs +++ /dev/null @@ -1,78 +0,0 @@ -use anyhow::{Context, Result}; -use dojo_world::contracts::world::WorldContract; -use dojo_world::metadata::Environment; -use dojo_world::utils::TransactionWaiter; -use starknet::accounts::Account; - -use super::get_contract_address; -use crate::commands::auth::{AuthCommand, AuthKind, ResourceType}; - -pub async fn execute(command: AuthCommand, env_metadata: Option) -> Result<()> { - match command { - AuthCommand::Grant { kind, world, starknet, account, transaction } => match kind { - AuthKind::Writer { models_contracts } => { - let world_address = world.address(env_metadata.as_ref())?; - let provider = starknet.provider(env_metadata.as_ref())?; - - let account = account.account(&provider, env_metadata.as_ref()).await?; - let world = WorldContract::new(world_address, &account); - - let mut calls = Vec::new(); - - for mc in models_contracts { - let contract = get_contract_address(&world, mc.contract).await?; - calls.push(world.grant_writer_getcall(&mc.model, &contract.into())); - } - - let res = account - .execute(calls) - .send() - .await - .with_context(|| "Failed to send transaction")?; - - if transaction.wait { - let receipt = TransactionWaiter::new(res.transaction_hash, &provider).await?; - println!("{}", serde_json::to_string_pretty(&receipt)?); - } else { - println!("Transaction hash: {:#x}", res.transaction_hash); - } - } - AuthKind::Owner { owners_resources } => { - let world_address = world.address(env_metadata.as_ref())?; - let provider = starknet.provider(env_metadata.as_ref())?; - - let account = account.account(&provider, env_metadata.as_ref()).await?; - let world = WorldContract::new(world_address, &account); - - let mut calls = Vec::new(); - - for or in owners_resources { - let resource = match &or.resource { - ResourceType::Model(name) => *name, - ResourceType::Contract(name_or_address) => { - get_contract_address(&world, name_or_address.clone()).await? 
- } - }; - - calls.push(world.grant_owner_getcall(&or.owner.into(), &resource)); - } - - let res = account - .execute(calls) - .send() - .await - .with_context(|| "Failed to send transaction")?; - - if transaction.wait { - let receipt = TransactionWaiter::new(res.transaction_hash, &provider).await?; - println!("{}", serde_json::to_string_pretty(&receipt)?); - } else { - println!("Transaction hash: {:#x}", res.transaction_hash); - } - } - }, - _ => todo!(), - } - - Ok(()) -} diff --git a/bin/sozo/src/ops/events.rs b/bin/sozo/src/ops/events.rs deleted file mode 100644 index a9d987928c..0000000000 --- a/bin/sozo/src/ops/events.rs +++ /dev/null @@ -1,275 +0,0 @@ -use std::collections::{HashMap, VecDeque}; -use std::fs; - -use anyhow::{anyhow, Context, Error, Result}; -use cairo_lang_starknet::abi::{self, Event, EventKind, Item}; -use cairo_lang_starknet::plugin::events::EventFieldKind; -use camino::Utf8PathBuf; -use dojo_lang::compiler::{DEPLOYMENTS_DIR, MANIFESTS_DIR}; -use dojo_world::manifest::{DeployedManifest, ManifestMethods}; -use dojo_world::metadata::Environment; -use starknet::core::types::{BlockId, EventFilter}; -use starknet::core::utils::{parse_cairo_short_string, starknet_keccak}; -use starknet::providers::Provider; - -use crate::commands::events::EventsArgs; - -pub async fn execute( - args: EventsArgs, - env_metadata: Option, - manifest_dir: &Utf8PathBuf, -) -> Result<()> { - let EventsArgs { - chunk_size, - starknet, - world, - from_block, - to_block, - events, - continuation_token, - json, - .. - } = args; - - let provider = starknet.provider(env_metadata.as_ref())?; - let chain_id = provider.chain_id().await?; - let chain_id = - parse_cairo_short_string(&chain_id).with_context(|| "Cannot parse chain_id as string")?; - - let events_map = if !json { - let deployed_manifest = manifest_dir - .join(MANIFESTS_DIR) - .join(DEPLOYMENTS_DIR) - .join(chain_id) - .with_extension("toml"); - - if !deployed_manifest.exists() { - return Err(anyhow!("Run scarb migrate before running this command")); - } - - Some(extract_events(&DeployedManifest::load_from_path(&deployed_manifest)?, manifest_dir)?) - } else { - None - }; - - let from_block = from_block.map(BlockId::Number); - let to_block = to_block.map(BlockId::Number); - // Currently dojo doesn't use custom keys for events. In future if custom keys are used this - // needs to be updated for granular queries. 
- let keys = - events.map(|e| vec![e.iter().map(|event| starknet_keccak(event.as_bytes())).collect()]); - - let provider = starknet.provider(env_metadata.as_ref())?; - let event_filter = EventFilter { from_block, to_block, address: world.world_address, keys }; - - let res = provider.get_events(event_filter, continuation_token, chunk_size).await?; - - if let Some(events_map) = events_map { - parse_and_print_events(res, events_map)?; - } else { - println!("{}", serde_json::to_string_pretty(&res)?); - } - - Ok(()) -} - -fn parse_and_print_events( - res: starknet::core::types::EventsPage, - events_map: HashMap>, -) -> Result<()> { - println!("Continuation token: {:?}", res.continuation_token); - println!("----------------------------------------------"); - for event in res.events { - if let Some(e) = parse_event(event.clone(), &events_map) { - println!("{e}"); - } else { - // Couldn't parse event - println!("{}", serde_json::to_string_pretty(&event)?); - } - } - Ok(()) -} - -fn parse_event( - event: starknet::core::types::EmittedEvent, - events_map: &HashMap>, -) -> Option { - let keys = event.keys; - let event_hash = keys[0].to_string(); - let events = events_map.get(&event_hash)?; - - 'outer: for e in events { - let mut ret = format!("Event name: {}\n", e.name); - let mut data = VecDeque::from(event.data.clone()); - - // Length is two only when its custom event - if keys.len() == 2 { - let name = parse_cairo_short_string(&keys[1]).ok()?; - ret.push_str(&format!("Model name: {}\n", name)); - } - - match &e.kind { - EventKind::Struct { members } => { - for field in members { - if field.kind != EventFieldKind::DataSerde { - continue; - } - match field.ty.as_str() { - "core::starknet::contract_address::ContractAddress" - | "core::starknet::class_hash::ClassHash" => { - let value = match data.pop_front() { - Some(addr) => addr, - None => continue 'outer, - }; - ret.push_str(&format!("{}: {:#x}\n", field.name, value)); - } - "core::felt252" => { - let value = match data.pop_front() { - Some(addr) => addr, - None => continue 'outer, - }; - let value = match parse_cairo_short_string(&value) { - Ok(v) => v, - Err(_) => format!("{:#x}", value), - }; - ret.push_str(&format!("{}: {}\n", field.name, value)); - } - "core::integer::u8" => { - let value = match data.pop_front() { - Some(addr) => addr, - None => continue 'outer, - }; - let num = match value.to_string().parse::() { - Ok(num) => num, - Err(_) => continue 'outer, - }; - - ret.push_str(&format!("{}: {}\n", field.name, num)); - } - "dojo_examples::systems::move::Direction" => { - let value = match data.pop_front() { - Some(addr) => addr, - None => continue 'outer, - }; - ret.push_str(&format!("{}: {}\n", field.name, value)); - } - "core::array::Span::" => { - let length = match data.pop_front() { - Some(addr) => addr, - None => continue 'outer, - }; - let length = match length.to_string().parse::() { - Ok(len) => len, - Err(_) => continue 'outer, - }; - ret.push_str(&format!("{}: ", field.name)); - if data.len() >= length { - ret.push_str(&format!( - "{:?}\n", - data.drain(..length) - .map(|e| format!("{:#x}", e)) - .collect::>() - )); - } else { - continue 'outer; - } - } - _ => { - return None; - } - } - } - return Some(ret); - } - EventKind::Enum { .. 
} => unreachable!("shouldn't reach here"), - } - } - - None -} - -fn extract_events( - manifest: &DeployedManifest, - manifest_dir: &Utf8PathBuf, -) -> Result>, Error> { - fn inner_helper( - events: &mut HashMap>, - abi_path: &Utf8PathBuf, - manifest_dir: &Utf8PathBuf, - ) -> Result<(), Error> { - let full_abi_path = manifest_dir.join(abi_path); - let abi: abi::Contract = serde_json::from_str(&fs::read_to_string(full_abi_path)?)?; - - for item in abi.into_iter() { - if let Item::Event(e) = item { - match e.kind { - abi::EventKind::Struct { .. } => { - let event_name = starknet_keccak( - e.name - .split("::") - .last() - .expect("valid fully qualified name") - .as_bytes(), - ); - let vec = events.entry(event_name.to_string()).or_default(); - vec.push(e.clone()); - } - abi::EventKind::Enum { .. } => (), - } - } - } - - Ok(()) - } - - let mut events_map = HashMap::new(); - - if let Some(abi_path) = manifest.world.inner.abi() { - inner_helper(&mut events_map, abi_path, manifest_dir)?; - } - - for contract in &manifest.contracts { - if let Some(abi_path) = contract.inner.abi() { - inner_helper(&mut events_map, abi_path, manifest_dir)?; - } - } - - for model in &manifest.contracts { - if let Some(abi_path) = model.inner.abi() { - inner_helper(&mut events_map, abi_path, manifest_dir)?; - } - } - - Ok(events_map) -} - -#[cfg(test)] -mod test { - use camino::Utf8Path; - use clap::Parser; - use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; - use dojo_world::manifest::BaseManifest; - - use super::*; - #[test] - fn events_are_parsed_correctly() { - let arg = EventsArgs::parse_from(["event", "Event1,Event2", "--chunk-size", "1"]); - assert!(arg.events.unwrap().len() == 2); - assert!(arg.from_block.is_none()); - assert!(arg.to_block.is_none()); - assert!(arg.chunk_size == 1); - } - - #[test] - fn extract_events_work_as_expected() { - let manifest_dir = Utf8Path::new("../../examples/spawn-and-move").to_path_buf(); - let manifest = - BaseManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(BASE_DIR)) - .unwrap() - .into(); - let result = extract_events(&manifest, &manifest_dir).unwrap(); - - // we are just collection all events from manifest file so just verifying count should work - assert!(result.len() == 2); - } -} diff --git a/bin/sozo/src/ops/execute.rs b/bin/sozo/src/ops/execute.rs deleted file mode 100644 index fd0f8d1373..0000000000 --- a/bin/sozo/src/ops/execute.rs +++ /dev/null @@ -1,40 +0,0 @@ -use anyhow::{Context, Result}; -use dojo_world::contracts::world::WorldContract; -use dojo_world::metadata::Environment; -use dojo_world::utils::TransactionWaiter; -use starknet::accounts::{Account, Call}; -use starknet::core::utils::get_selector_from_name; - -use super::get_contract_address; -use crate::commands::execute::ExecuteArgs; - -pub async fn execute(args: ExecuteArgs, env_metadata: Option) -> Result<()> { - let ExecuteArgs { contract, entrypoint, calldata, starknet, world, account, transaction } = - args; - - let provider = starknet.provider(env_metadata.as_ref())?; - - let account = account.account(&provider, env_metadata.as_ref()).await?; - let world_address = world.address(env_metadata.as_ref())?; - let world = WorldContract::new(world_address, &account); - - let contract_address = get_contract_address(&world, contract).await?; - let res = account - .execute(vec![Call { - calldata, - to: contract_address, - selector: get_selector_from_name(&entrypoint)?, - }]) - .send() - .await - .with_context(|| "Failed to send transaction")?; - - if transaction.wait { - let receipt = 
TransactionWaiter::new(res.transaction_hash, &provider).await?; - println!("{}", serde_json::to_string_pretty(&receipt)?); - } else { - println!("Transaction hash: {:#x}", res.transaction_hash); - } - - Ok(()) -} diff --git a/bin/sozo/src/ops/model.rs b/bin/sozo/src/ops/model.rs deleted file mode 100644 index a4a38441e0..0000000000 --- a/bin/sozo/src/ops/model.rs +++ /dev/null @@ -1,67 +0,0 @@ -use anyhow::Result; -use dojo_world::contracts::model::ModelReader; -use dojo_world::contracts::world::WorldContractReader; -use dojo_world::metadata::Environment; -use starknet::core::types::{BlockId, BlockTag}; - -use crate::commands::model::ModelCommands; - -pub async fn execute(command: ModelCommands, env_metadata: Option) -> Result<()> { - match command { - ModelCommands::ClassHash { name, world, starknet } => { - let world_address = world.address(env_metadata.as_ref())?; - let provider = starknet.provider(env_metadata.as_ref())?; - - let world = WorldContractReader::new(world_address, &provider) - .with_block(BlockId::Tag(BlockTag::Pending)); - - let model = world.model_reader(&name).await?; - - println!("{:#x}", model.class_hash()); - } - - ModelCommands::ContractAddress { name, world, starknet } => { - let world_address = world.address(env_metadata.as_ref())?; - let provider = starknet.provider(env_metadata.as_ref())?; - - let world = WorldContractReader::new(world_address, &provider) - .with_block(BlockId::Tag(BlockTag::Pending)); - - let model = world.model_reader(&name).await?; - - println!("{:#x}", model.contract_address()); - } - - ModelCommands::Schema { name, world, starknet, to_json } => { - let world_address = world.address(env_metadata.as_ref())?; - let provider = starknet.provider(env_metadata.as_ref())?; - - let world = WorldContractReader::new(world_address, &provider) - .with_block(BlockId::Tag(BlockTag::Pending)); - - let model = world.model_reader(&name).await?; - let schema = model.schema().await?; - - if to_json { - println!("{}", serde_json::to_string_pretty(&schema)?) - } else { - println!("{schema}"); - } - } - - ModelCommands::Get { name, keys, starknet, world, .. 
} => { - let world_address = world.address(env_metadata.as_ref())?; - let provider = starknet.provider(env_metadata.as_ref())?; - - let world = WorldContractReader::new(world_address, &provider) - .with_block(BlockId::Tag(BlockTag::Pending)); - - let model = world.model_reader(&name).await?; - let entity = model.entity(&keys).await?; - - println!("{entity}") - } - } - - Ok(()) -} diff --git a/bin/sozo/src/ops/register.rs b/bin/sozo/src/ops/register.rs deleted file mode 100644 index c0d37d8199..0000000000 --- a/bin/sozo/src/ops/register.rs +++ /dev/null @@ -1,38 +0,0 @@ -use anyhow::{Context, Result}; -use dojo_world::contracts::WorldContract; -use dojo_world::metadata::Environment; -use dojo_world::utils::TransactionWaiter; -use starknet::accounts::Account; - -use crate::commands::register::RegisterCommand; - -pub async fn execute(command: RegisterCommand, env_metadata: Option) -> Result<()> { - match command { - RegisterCommand::Model { models, world, starknet, account, transaction } => { - let world_address = world.address(env_metadata.as_ref())?; - let provider = starknet.provider(env_metadata.as_ref())?; - - let account = account.account(&provider, env_metadata.as_ref()).await?; - let world = WorldContract::new(world_address, &account); - - let calls = models - .iter() - .map(|c| world.register_model_getcall(&(*c).into())) - .collect::>(); - - let res = account - .execute(calls) - .send() - .await - .with_context(|| "Failed to send transaction")?; - - if transaction.wait { - let receipt = TransactionWaiter::new(res.transaction_hash, &provider).await?; - println!("{}", serde_json::to_string_pretty(&receipt)?); - } else { - println!("Transaction hash: {:#x}", res.transaction_hash); - } - } - } - Ok(()) -} diff --git a/bin/sozo/src/utils.rs b/bin/sozo/src/utils.rs new file mode 100644 index 0000000000..76d6de797b --- /dev/null +++ b/bin/sozo/src/utils.rs @@ -0,0 +1,37 @@ +use anyhow::Error; +use dojo_world::contracts::world::WorldContract; +use dojo_world::metadata::{dojo_metadata_from_workspace, Environment}; +use scarb::core::Config; +use starknet::accounts::SingleOwnerAccount; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet::signers::LocalWallet; + +use crate::commands::options::account::AccountOptions; +use crate::commands::options::starknet::StarknetOptions; +use crate::commands::options::world::WorldOptions; + +pub fn load_metadata_from_config(config: &Config) -> Result, Error> { + let env_metadata = if config.manifest_path().exists() { + let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; + + dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) + } else { + None + }; + + Ok(env_metadata) +} + +pub async fn world_from_env_metadata( + world: WorldOptions, + account: AccountOptions, + starknet: StarknetOptions, + env_metadata: &Option, +) -> Result, LocalWallet>>, Error> { + let world_address = world.address(env_metadata.as_ref())?; + let provider = starknet.provider(env_metadata.as_ref())?; + + let account = account.account(provider, env_metadata.as_ref()).await?; + Ok(WorldContract::new(world_address, account)) +} diff --git a/bin/sozo/tests/register_test.rs b/bin/sozo/tests/register_test.rs new file mode 100644 index 0000000000..176bd1c5ec --- /dev/null +++ b/bin/sozo/tests/register_test.rs @@ -0,0 +1,55 @@ +mod utils; + +use dojo_test_utils::compiler::build_test_config; +use dojo_test_utils::migration::prepare_migration; +use dojo_test_utils::sequencer::{ + 
get_default_test_starknet_config, SequencerConfig, TestSequencer, +}; +use scarb::ops; +use sozo_ops::migration::execute_strategy; +use starknet::accounts::Account; +use starknet::core::types::{BlockId, BlockTag}; +use utils::snapbox::get_snapbox; + +#[tokio::test(flavor = "multi_thread")] +async fn reregister_models() { + let config = build_test_config("../../examples/spawn-and-move/Scarb.toml").unwrap(); + let ws = ops::read_workspace(config.manifest_path(), &config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); + + let base_dir = "../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base_dir); + let migration = prepare_migration(base_dir.into(), target_dir.into()).unwrap(); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + execute_strategy(&ws, &migration, &account, None).await.unwrap(); + let world_address = &format!("0x{:x}", &migration.world_address().unwrap()); + let account_address = &format!("0x{:x}", account.address()); + let private_key = &format!("0x{:x}", sequencer.raw_account().private_key); + let rpc_url = &sequencer.url().to_string(); + + let moves_model = + migration.models.iter().find(|m| m.diff.name == "dojo_examples::models::moves").unwrap(); + let moves_model_class_hash = &format!("0x{:x}", moves_model.diff.local); + let args_vec = [ + "register", + "model", + moves_model_class_hash, + "--world", + world_address, + "--account-address", + account_address, + "--rpc-url", + rpc_url, + "--private-key", + private_key, + ]; + + let assert = get_snapbox().args(args_vec.iter()).assert().success(); + assert!(format!("{:?}", assert.get_output()).contains("No new models to register")); +} diff --git a/bin/sozo/tests/utils/stdout.rs b/bin/sozo/tests/utils/stdout.rs index 62373ec707..8b14dbcf83 100644 --- a/bin/sozo/tests/utils/stdout.rs +++ b/bin/sozo/tests/utils/stdout.rs @@ -2,6 +2,7 @@ use std::fs; const STDOUT_DIR: &str = "tests/fixtures/stdout/"; +#[allow(dead_code)] pub fn expected_stdout(cmd: &str) -> String { let without_whitespaces: String = cmd.split_whitespace().collect(); let file_name = without_whitespaces.replace('-', "_"); diff --git a/bin/torii/Cargo.toml b/bin/torii/Cargo.toml index 83e8abb272..eb01a20f41 100644 --- a/bin/torii/Cargo.toml +++ b/bin/torii/Cargo.toml @@ -45,6 +45,7 @@ tracing-subscriber.workspace = true tracing.workspace = true url.workspace = true torii-relay.workspace = true +webbrowser = "0.8" [dev-dependencies] camino.workspace = true diff --git a/bin/torii/src/main.rs b/bin/torii/src/main.rs index e5cbe2ac2d..f5cf30b273 100644 --- a/bin/torii/src/main.rs +++ b/bin/torii/src/main.rs @@ -27,6 +27,7 @@ use tokio::sync::broadcast; use tokio::sync::broadcast::Sender; use tokio_stream::StreamExt; use torii_core::engine::{Engine, EngineConfig, Processors}; +use torii_core::processors::event_message::EventMessageProcessor; use torii_core::processors::metadata_update::MetadataUpdateProcessor; use torii_core::processors::register_model::RegisterModelProcessor; use torii_core::processors::store_del_record::StoreDelRecordProcessor; @@ -36,7 +37,7 @@ use torii_core::simple_broker::SimpleBroker; use torii_core::sql::Sql; use torii_core::types::Model; use torii_server::proxy::Proxy; -use tracing::info; +use tracing::{error, info}; use tracing_subscriber::{fmt, EnvFilter}; use url::{form_urlencoded, Url}; @@ -98,6 +99,14 @@ struct Args { /// The 
metrics will be served at the given interface and port. #[arg(long, value_name = "SOCKET", value_parser = parse_socket_address, help_heading = "Metrics")] metrics: Option, + + /// Open World Explorer on the browser. + #[arg(long)] + explorer: bool, + + /// Chunk size of the events page when indexing using events + #[arg(long, default_value = "1000")] + events_chunk_size: u64, } #[tokio::main] @@ -145,13 +154,14 @@ async fn main() -> anyhow::Result<()> { // Get world address let world = WorldContractReader::new(args.world_address, &provider); - let mut db = Sql::new(pool.clone(), args.world_address).await?; + let db = Sql::new(pool.clone(), args.world_address).await?; let processors = Processors { event: vec![ Box::new(RegisterModelProcessor), Box::new(StoreSetRecordProcessor), Box::new(MetadataUpdateProcessor), Box::new(StoreDelRecordProcessor), + Box::new(EventMessageProcessor), ], transaction: vec![Box::new(StoreTransactionProcessor)], ..Processors::default() @@ -161,10 +171,14 @@ async fn main() -> anyhow::Result<()> { let mut engine = Engine::new( world, - &mut db, + db.clone(), &provider, processors, - EngineConfig { start_block: args.start_block, ..Default::default() }, + EngineConfig { + start_block: args.start_block, + events_chunk_size: args.events_chunk_size, + ..Default::default() + }, shutdown_tx.clone(), Some(block_tx), ); @@ -179,6 +193,15 @@ async fn main() -> anyhow::Result<()> { ) .await?; + let mut libp2p_relay_server = torii_relay::server::Relay::new( + db, + args.relay_port, + args.relay_webrtc_port, + args.relay_local_key_path, + args.relay_cert_path, + ) + .expect("Failed to start libp2p relay server"); + let proxy_server = Arc::new(Proxy::new(args.addr, args.allowed_origins, Some(grpc_addr), None)); let graphql_server = spawn_rebuilding_graphql_server( @@ -188,22 +211,21 @@ async fn main() -> anyhow::Result<()> { proxy_server.clone(), ); - let mut libp2p_relay_server = torii_relay::server::Relay::new( - args.relay_port, - args.relay_webrtc_port, - args.relay_local_key_path, - args.relay_cert_path, - ) - .expect("Failed to start libp2p relay server"); - let endpoint = format!("http://{}", args.addr); let gql_endpoint = format!("{}/graphql", endpoint); let encoded: String = form_urlencoded::byte_serialize(gql_endpoint.replace("0.0.0.0", "localhost").as_bytes()) .collect(); + let explorer_url = format!("https://worlds.dev/torii?url={}", encoded); info!(target: "torii::cli", "Starting torii endpoint: {}", endpoint); info!(target: "torii::cli", "Serving Graphql playground: {}", gql_endpoint); - info!(target: "torii::cli", "World Explorer is available on: {}\n", format!("https://worlds.dev/torii?url={}", encoded)); + info!(target: "torii::cli", "World Explorer is available on: {}\n", explorer_url); + + if args.explorer { + if let Err(e) = webbrowser::open(&explorer_url) { + error!("Failed to open World Explorer in the browser: {e}"); + } + } if let Some(listen_addr) = args.metrics { let prometheus_handle = prometheus_exporter::install_recorder("torii")?; diff --git a/crates/benches/Cargo.toml b/crates/benches/Cargo.toml index 0ae0ae4923..2a84b79e42 100644 --- a/crates/benches/Cargo.toml +++ b/crates/benches/Cargo.toml @@ -26,6 +26,7 @@ clap.workspace = true scarb.workspace = true dojo-lang.workspace = true dojo-world.workspace = true +sozo-ops.workspace = true [features] default = ["skip-benchmarks"] diff --git a/crates/benches/src/deployer.rs b/crates/benches/src/deployer.rs index e14ace0440..6a02e4c905 100644 --- a/crates/benches/src/deployer.rs +++ 
b/crates/benches/src/deployer.rs @@ -6,14 +6,13 @@ use clap::Parser; use dojo_lang::compiler::{DojoCompiler, DEPLOYMENTS_DIR, MANIFESTS_DIR}; use dojo_lang::plugin::CairoPluginRepository; use dojo_lang::scarb_internal::compile_workspace; -use dojo_world::manifest::DeployedManifest; +use dojo_world::manifest::DeploymentManifest; use futures::executor::block_on; use katana_runner::KatanaRunner; use scarb::compiler::CompilerRepository; use scarb::core::{Config, TargetKind}; use scarb::ops::CompileOpts; use sozo::args::{Commands, SozoArgs}; -use sozo::ops::migration; use starknet::core::types::FieldElement; use starknet::core::utils::parse_cairo_short_string; use starknet::providers::Provider; @@ -115,9 +114,9 @@ async fn prepare_migration_args(args: SozoArgs) -> Result { let chain_id = migrate.starknet.provider(None).unwrap().chain_id().await.unwrap(); let chain_id = parse_cairo_short_string(&chain_id).unwrap(); - migration::execute(&ws, migrate, None).await?; + migrate.run(&config)?; - let manifest = DeployedManifest::load_from_path( + let manifest = DeploymentManifest::load_from_path( &manifest_dir .join(MANIFESTS_DIR) .join(DEPLOYMENTS_DIR) diff --git a/crates/dojo-core/src/base_test.cairo b/crates/dojo-core/src/base_test.cairo index f783d85e33..78211fd2f4 100644 --- a/crates/dojo-core/src/base_test.cairo +++ b/crates/dojo-core/src/base_test.cairo @@ -145,7 +145,7 @@ mod invalid_model { impl InvalidModelSelector of super::IMetadataOnly { fn selector(self: @ContractState) -> felt252 { // Pre-computed address of a contract deployed through the world. - 0xa4a104a045a21149f250a92784d614fc8748d6712653c824c07f1bf25d314a + 0x78d01dde6c9d61e26dfc3fcc8d1dea6fd86e3afe324bf24c61ebc1e82fad12a } fn name(self: @ContractState) -> ByteArray { @@ -180,8 +180,10 @@ fn test_deploy_from_world_invalid_model() { let world = deploy_world(); let base_address = world.deploy_contract(0, base::TEST_CLASS_HASH.try_into().unwrap()); - // The print is required for invalid_model name to be a valid address as the - // register_model will use the gas consumed as salt. + + // This print makes it possible to know the address of the deployed contract, which must be + // returned by the selector() function of the invalid model to simulate an ACL issue + // (see the register_model function). base_address.print(); world.register_model(invalid_model::TEST_CLASS_HASH.try_into().unwrap()); diff --git a/crates/dojo-core/src/resource_metadata.cairo b/crates/dojo-core/src/resource_metadata.cairo index 32c1aba612..e7babca7f2 100644 --- a/crates/dojo-core/src/resource_metadata.cairo +++ b/crates/dojo-core/src/resource_metadata.cairo @@ -5,7 +5,7 @@ //! 
use dojo::world::IWorldDispatcherTrait; -const RESOURCE_METADATA_MODEL: felt252 = selector!("ResourceMetadata"); +const RESOURCE_METADATA_SELECTOR: felt252 = selector!("ResourceMetadata"); fn initial_address() -> starknet::ContractAddress { starknet::contract_address_const::<0>() @@ -29,7 +29,7 @@ impl ResourceMetadataModel of dojo::model::Model { fn entity( world: dojo::world::IWorldDispatcher, keys: Span, layout: Span ) -> ResourceMetadata { - let values = world.entity(selector!("ResourceMetadata"), keys, layout); + let values = world.entity(RESOURCE_METADATA_SELECTOR, keys, layout); let mut serialized = core::array::ArrayTrait::new(); core::array::serialize_array_helper(keys, ref serialized); core::array::serialize_array_helper(values, ref serialized); @@ -57,7 +57,7 @@ impl ResourceMetadataModel of dojo::model::Model { #[inline(always)] fn selector(self: @ResourceMetadata) -> felt252 { - selector!("ResourceMetadata") + RESOURCE_METADATA_SELECTOR } #[inline(always)] @@ -136,13 +136,14 @@ impl ResourceMetadataIntrospect<> of dojo::database::introspect::Introspect felt252 { - selector!("ResourceMetadata") + RESOURCE_METADATA_SELECTOR } fn name(self: @ContractState) -> ByteArray { diff --git a/crates/dojo-core/src/world.cairo b/crates/dojo-core/src/world.cairo index 1aa9682425..601ac33303 100644 --- a/crates/dojo-core/src/world.cairo +++ b/crates/dojo-core/src/world.cairo @@ -76,7 +76,7 @@ mod world { use dojo::model::Model; use dojo::world::{IWorldDispatcher, IWorld, IUpgradeableWorld}; use dojo::resource_metadata; - use dojo::resource_metadata::{ResourceMetadata, RESOURCE_METADATA_MODEL}; + use dojo::resource_metadata::{ResourceMetadata, RESOURCE_METADATA_SELECTOR}; use super::Errors; @@ -182,7 +182,7 @@ mod world { self .models .write( - RESOURCE_METADATA_MODEL, + RESOURCE_METADATA_SELECTOR, (resource_metadata::initial_class_hash(), resource_metadata::initial_address()) ); @@ -201,7 +201,7 @@ mod world { Introspect::::layout(ref layout); let mut data = self - .entity(RESOURCE_METADATA_MODEL, array![resource_id].span(), layout.span(),); + .entity(RESOURCE_METADATA_SELECTOR, array![resource_id].span(), layout.span(),); let mut model = array![resource_id]; core::array::serialize_array_helper(data, ref model); diff --git a/crates/dojo-lang/src/inline_macros/emit.rs b/crates/dojo-lang/src/inline_macros/emit.rs index 662d6d704d..05b5b5533e 100644 --- a/crates/dojo-lang/src/inline_macros/emit.rs +++ b/crates/dojo-lang/src/inline_macros/emit.rs @@ -6,6 +6,8 @@ use cairo_lang_diagnostics::Severity; use cairo_lang_semantic::inline_macros::unsupported_bracket_diagnostic; use cairo_lang_syntax::node::{ast, TypedSyntaxNode}; +use super::unsupported_arg_diagnostic; + #[derive(Debug, Default)] pub struct EmitMacro; @@ -23,11 +25,7 @@ impl InlineMacroExprPlugin for EmitMacro { return unsupported_bracket_diagnostic(db, syntax); }; let mut builder = PatchBuilder::new(db); - builder.add_str( - "{ - let mut keys = Default::::default(); - let mut data = Default::::default();", - ); + builder.add_str("{"); let args = arg_list.arguments(db).elements(db); @@ -36,25 +34,85 @@ impl InlineMacroExprPlugin for EmitMacro { code: None, diagnostics: vec![PluginDiagnostic { stable_ptr: arg_list.arguments(db).stable_ptr().untyped(), - message: "Invalid arguments. Expected \"emit!(world, event)\"".to_string(), + message: "Invalid arguments. 
Expected \"emit!(world, models,)\"".to_string(), severity: Severity::Error, }], }; } let world = &args[0]; - let event = &args[1]; - - builder.add_str( - "\n starknet::Event::append_keys_and_data(@core::traits::Into::<_, \ - Event>::into(", - ); - builder.add_node(event.as_syntax_node()); - builder.add_str("), ref keys, ref data);"); - - builder.add_str("\n "); - builder.add_node(world.as_syntax_node()); - builder.add_str(".emit(keys, data.span());"); + + let ast::ArgClause::Unnamed(models) = args[1].arg_clause(db) else { + return unsupported_arg_diagnostic(db, syntax); + }; + + let mut bundle = vec![]; + + match models.value(db) { + ast::Expr::Parenthesized(parens) => { + let syntax_node = parens.expr(db).as_syntax_node(); + bundle.push((syntax_node.get_text(db), syntax_node)); + } + ast::Expr::Tuple(list) => { + list.expressions(db).elements(db).into_iter().for_each(|expr| { + let syntax_node = expr.as_syntax_node(); + bundle.push((syntax_node.get_text(db), syntax_node)); + }) + } + ast::Expr::StructCtorCall(ctor) => { + let syntax_node = ctor.as_syntax_node(); + bundle.push((syntax_node.get_text(db), syntax_node)); + } + _ => { + return InlinePluginResult { + code: None, + diagnostics: vec![PluginDiagnostic { + message: "Invalid arguments. Expected \"(world, (models,))\"".to_string(), + stable_ptr: arg_list.arguments(db).stable_ptr().untyped(), + severity: Severity::Error, + }], + }; + } + } + + if bundle.is_empty() { + return InlinePluginResult { + code: None, + diagnostics: vec![PluginDiagnostic { + message: "Invalid arguments: No models provided.".to_string(), + stable_ptr: arg_list.arguments(db).stable_ptr().untyped(), + severity: Severity::Error, + }], + }; + } + + for (event, _) in bundle { + builder.add_str("{"); + + builder.add_str( + " + let mut keys = Default::::default(); + let mut data = Default::::default();", + ); + + builder.add_str(&format!( + "keys.append(selector!(\"{}\"));", + event.split_whitespace().next().unwrap() + )); + + builder.add_str(&format!( + " + starknet::Event::append_keys_and_data(@{event}, ref keys, ref data);", + event = event + )); + + builder.add_str("\n "); + builder.add_node(world.as_syntax_node()); + builder.add_str(".emit(keys, data.span());"); + + builder.add_str("}"); + } + builder.add_str("}"); InlinePluginResult { diff --git a/crates/dojo-lang/src/introspect.rs b/crates/dojo-lang/src/introspect.rs index 0c9feb61d0..827690dbbf 100644 --- a/crates/dojo-lang/src/introspect.rs +++ b/crates/dojo-lang/src/introspect.rs @@ -144,6 +144,26 @@ pub fn handle_introspect_struct( handle_introspect_internal(db, name, struct_ast.generic_params(db), vec![], 0, type_ty, members) } +/// Generates enum arm type introspect +pub fn handle_enum_arm_type(ty_name: &String, is_primitive: bool) -> (String, String) { + let serialized = if is_primitive { + format!( + "dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Primitive('{}') + )", + ty_name + ) + } else { + format!( + "dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Introspect::<{}>::ty() + )", + ty_name + ) + }; + (serialized, ty_name.to_string()) +} + /// A handler for Dojo code derives Introspect for an enum /// Parameters: /// * db: The semantic database. 
@@ -155,6 +175,8 @@ pub fn handle_introspect_enum( diagnostics: &mut Vec, enum_ast: ItemEnum, ) -> RewriteNode { + let primitive_sizes = primitive_type_introspection(); + let name = enum_ast.name(db).text(db).into(); let variant_type = enum_ast.variants(db).elements(db).first().unwrap().type_clause(db); @@ -167,29 +189,15 @@ pub fn handle_introspect_enum( let args = (*paren_list.expressions(db)).elements(db); args.iter().for_each(|arg| { let ty_name = arg.as_syntax_node().get_text(db); - variant_type_arr.push(( - // Not using Ty right now, but still keeping it for later. - format!( - "dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Ty::Primitive('{}') - )", - ty_name - ), - ty_name, - )); + let is_primitive = primitive_sizes.get(&ty_name).is_some(); + + variant_type_arr.push(handle_enum_arm_type(&ty_name, is_primitive)); }); } else if let Expr::Path(type_path) = types_tuple.ty(db) { let ty_name = type_path.as_syntax_node().get_text(db); - variant_type_arr.push(( - // Not using Ty right now, but still keeping it for later. - format!( - "dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Introspect::<{}>::ty() - )", - ty_name - ), - ty_name, - )); + let is_primitive = primitive_sizes.get(&ty_name).is_some(); + + variant_type_arr.push(handle_enum_arm_type(&ty_name, is_primitive)); } else { diagnostics.push(PluginDiagnostic { stable_ptr: types_tuple.stable_ptr().0, diff --git a/crates/dojo-lang/src/model.rs b/crates/dojo-lang/src/model.rs index ca241f02a5..011e10b57a 100644 --- a/crates/dojo-lang/src/model.rs +++ b/crates/dojo-lang/src/model.rs @@ -1,7 +1,9 @@ +use std::cmp::Ordering; + use cairo_lang_defs::patcher::RewriteNode; use cairo_lang_defs::plugin::PluginDiagnostic; use cairo_lang_diagnostics::Severity; -use cairo_lang_syntax::node::ast::ItemStruct; +use cairo_lang_syntax::node::ast::{ArgClause, Expr, ItemStruct, OptionArgListParenthesized}; use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::helpers::QueryAttrs; use cairo_lang_syntax::node::{Terminal, TypedSyntaxNode}; @@ -9,7 +11,129 @@ use cairo_lang_utils::unordered_hash_map::UnorderedHashMap; use convert_case::{Case, Casing}; use dojo_world::manifest::Member; -use crate::plugin::{DojoAuxData, Model}; +use crate::plugin::{DojoAuxData, Model, DOJO_MODEL_ATTR}; + +const CURRENT_MODEL_VERSION: u8 = 1; +const MODEL_VERSION_NAME: &str = "version"; + +/// Get the version associated with the dojo::model attribute. +/// +/// Note: the dojo::model attribute has already been checked, so there is one and only one such attribute. +/// +/// Parameters: +/// * db: The semantic database. +/// * struct_ast: The AST of the model struct. +/// * diagnostics: vector of compiler diagnostics. +/// +/// Returns: +/// * The model version associated with the dojo::model attribute. 
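+///
+/// Usage sketch (Cairo), mirroring the `Modelv0` and `Position` structs from the plugin test
+/// data; the described expansion follows `handle_model_struct` below, and anything beyond
+/// that is an assumption rather than a guaranteed output:
+///
+///   #[dojo::model(version: 0)]
+///   struct Modelv0 {
+///       #[key]
+///       id: felt252,
+///       v: Vec3,
+///   }
+///
+///   #[dojo::model]  // no version argument: defaults to CURRENT_MODEL_VERSION (1)
+///   struct Position {
+///       #[key]
+///       id: felt252,
+///       v: Vec3,
+///   }
+///
+/// With `version: 0`, the generated `version()` returns 0 and `selector()` falls back to the
+/// plain model name (`"Modelv0"`); with the current version, `version()` returns 1 and
+/// `selector()` is computed as `selector!("Position")`.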
+pub fn get_model_version( + db: &dyn SyntaxGroup, + struct_ast: ItemStruct, + diagnostics: &mut Vec, +) -> u8 { + if let OptionArgListParenthesized::ArgListParenthesized(arguments) = + struct_ast.attributes(db).query_attr(db, DOJO_MODEL_ATTR).first().unwrap().arguments(db) + { + let version_args = arguments + .arguments(db) + .elements(db) + .iter() + .filter_map(|a| match a.arg_clause(db) { + ArgClause::Named(x) => { + let arg_name = x.name(db).text(db).to_string(); + if arg_name.eq(MODEL_VERSION_NAME) { + Some(x.value(db)) + } else { + diagnostics.push(PluginDiagnostic { + message: format!("Unexpected argument '{}' for dojo::model", arg_name), + stable_ptr: x.stable_ptr().untyped(), + severity: Severity::Warning, + }); + None + } + } + ArgClause::Unnamed(x) => { + diagnostics.push(PluginDiagnostic { + message: format!( + "Unexpected argument '{}' for dojo::model", + x.as_syntax_node().get_text(db) + ), + stable_ptr: x.stable_ptr().untyped(), + severity: Severity::Warning, + }); + None + } + ArgClause::FieldInitShorthand(x) => { + diagnostics.push(PluginDiagnostic { + message: format!( + "Unexpected argument '{}' for dojo::model", + x.name(db).name(db).text(db).to_string() + ), + stable_ptr: x.stable_ptr().untyped(), + severity: Severity::Warning, + }); + None + } + }) + .collect::>(); + + let version = match version_args.len().cmp(&1) { + Ordering::Equal => match version_args.first().unwrap() { + Expr::Literal(v) => { + if let Ok(int_value) = v.text(db).parse::() { + if int_value <= CURRENT_MODEL_VERSION { + Some(int_value) + } else { + diagnostics.push(PluginDiagnostic { + message: format!("dojo::model version {} not supported", int_value), + stable_ptr: v.stable_ptr().untyped(), + severity: Severity::Error, + }); + None + } + } else { + diagnostics.push(PluginDiagnostic { + message: format!( + "The argument '{}' of dojo::model must be an integer", + MODEL_VERSION_NAME + ), + stable_ptr: struct_ast.stable_ptr().untyped(), + severity: Severity::Error, + }); + None + } + } + _ => { + diagnostics.push(PluginDiagnostic { + message: format!( + "The argument '{}' of dojo::model must be an integer", + MODEL_VERSION_NAME + ), + stable_ptr: struct_ast.stable_ptr().untyped(), + severity: Severity::Error, + }); + None + } + }, + Ordering::Greater => { + diagnostics.push(PluginDiagnostic { + message: format!( + "Too many '{}' attributes for dojo::model", + MODEL_VERSION_NAME + ), + stable_ptr: struct_ast.stable_ptr().untyped(), + severity: Severity::Error, + }); + None + } + Ordering::Less => None, + }; + + return if let Some(v) = version { v } else { CURRENT_MODEL_VERSION }; + } + CURRENT_MODEL_VERSION +} /// A handler for Dojo code that modifies a model struct. 
/// Parameters: @@ -24,6 +148,17 @@ pub fn handle_model_struct( ) -> (RewriteNode, Vec) { let mut diagnostics = vec![]; + let version = get_model_version(db, struct_ast.clone(), &mut diagnostics); + + let model_name = struct_ast.name(db).as_syntax_node().get_text(db).trim().to_string(); + let (model_version, model_selector) = match version { + 0 => (RewriteNode::Text("0".to_string()), RewriteNode::Text(format!("\"{model_name}\""))), + _ => ( + RewriteNode::Text(CURRENT_MODEL_VERSION.to_string()), + RewriteNode::Text(format!("selector!(\"{model_name}\")")), + ), + }; + let elements = struct_ast.members(db).elements(db); let members: &Vec<_> = &elements .iter() @@ -114,12 +249,12 @@ impl $type_name$Model of dojo::model::Model<$type_name$> { #[inline(always)] fn version(self: @$type_name$) -> u8 { - 1 + $model_version$ } #[inline(always)] fn selector(self: @$type_name$) -> felt252 { - selector!(\"$type_name$\") + $model_selector$ } #[inline(always)] @@ -166,7 +301,7 @@ mod $contract_name$ { #[abi(embed_v0)] impl DojoModelImpl of dojo::model::IModel{ fn selector(self: @ContractState) -> felt252 { - selector!(\"$type_name$\") + $model_selector$ } fn name(self: @ContractState) -> ByteArray { @@ -174,7 +309,7 @@ mod $contract_name$ { } fn version(self: @ContractState) -> u8 { - 1 + $model_version$ } fn unpacked_size(self: @ContractState) -> usize { @@ -208,13 +343,12 @@ mod $contract_name$ { ", &UnorderedHashMap::from([ ("contract_name".to_string(), RewriteNode::Text(name.to_case(Case::Snake))), - ( - "type_name".to_string(), - RewriteNode::new_trimmed(struct_ast.name(db).as_syntax_node()), - ), + ("type_name".to_string(), RewriteNode::Text(model_name)), ("namespace".to_string(), RewriteNode::Text("namespace".to_string())), ("serialized_keys".to_string(), RewriteNode::new_modified(serialized_keys)), ("serialized_values".to_string(), RewriteNode::new_modified(serialized_values)), + ("model_version".to_string(), model_version), + ("model_selector".to_string(), model_selector), ]), ), diagnostics, diff --git a/crates/dojo-lang/src/plugin.rs b/crates/dojo-lang/src/plugin.rs index ca439c0aeb..19889fe0f5 100644 --- a/crates/dojo-lang/src/plugin.rs +++ b/crates/dojo-lang/src/plugin.rs @@ -1,3 +1,5 @@ +use std::cmp::Ordering; + use anyhow::Result; use cairo_lang_defs::patcher::PatchBuilder; use cairo_lang_defs::plugin::{ @@ -35,7 +37,7 @@ use crate::print::{handle_print_enum, handle_print_struct}; const DOJO_CONTRACT_ATTR: &str = "dojo::contract"; const DOJO_INTERFACE_ATTR: &str = "dojo::interface"; -const DOJO_MODEL_ATTR: &str = "dojo::model"; +pub const DOJO_MODEL_ATTR: &str = "dojo::model"; #[derive(Clone, Debug, PartialEq)] pub struct Model { @@ -63,11 +65,7 @@ impl GeneratedFileAuxData for DojoAuxData { self } fn eq(&self, other: &dyn GeneratedFileAuxData) -> bool { - if let Some(other) = other.as_any().downcast_ref::() { - self == other - } else { - false - } + if let Some(other) = other.as_any().downcast_ref::() { self == other } else { false } } } @@ -85,11 +83,7 @@ impl GeneratedFileAuxData for ComputedValuesAuxData { self } fn eq(&self, other: &dyn GeneratedFileAuxData) -> bool { - if let Some(other) = other.as_any().downcast_ref::() { - self == other - } else { - false - } + if let Some(other) = other.as_any().downcast_ref::() { self == other } else { false } } } @@ -401,11 +395,24 @@ impl MacroPlugin for BuiltinDojoPlugin { } } - for _ in struct_ast.attributes(db).query_attr(db, "dojo::model") { - let (model_rewrite_nodes, model_diagnostics) = - handle_model_struct(db, &mut aux_data, 
struct_ast.clone()); - rewrite_nodes.push(model_rewrite_nodes); - diagnostics.extend(model_diagnostics); + let attributes = struct_ast.attributes(db).query_attr(db, DOJO_MODEL_ATTR); + + match attributes.len().cmp(&1) { + Ordering::Equal => { + let (model_rewrite_nodes, model_diagnostics) = + handle_model_struct(db, &mut aux_data, struct_ast.clone()); + rewrite_nodes.push(model_rewrite_nodes); + diagnostics.extend(model_diagnostics); + } + Ordering::Greater => { + diagnostics.push(PluginDiagnostic { + message: "A Dojo model must have zero or one dojo::model attribute." + .into(), + stable_ptr: struct_ast.stable_ptr().untyped(), + severity: Severity::Error, + }); + } + _ => {} } if rewrite_nodes.is_empty() { diff --git a/crates/dojo-lang/src/plugin_test_data/introspect b/crates/dojo-lang/src/plugin_test_data/introspect index 210ad28b58..872d50f34f 100644 --- a/crates/dojo-lang/src/plugin_test_data/introspect +++ b/crates/dojo-lang/src/plugin_test_data/introspect @@ -19,13 +19,13 @@ enum PlainEnum { } #[derive(Serde, Copy, Drop, Introspect)] -enum EnumPrimitive { +enum EnumTupleOnePrimitive { Left: (u16,), Right: (u16,), } #[derive(Serde, Copy, Drop, Introspect)] -enum EnumTuple { +enum EnumTupleSeveralPrimitive { Left: (u8, u8), Right: (u8, u8), } @@ -36,6 +36,18 @@ enum EnumCustom { Right: Vec2, } +#[derive(Serde, Copy, Drop, Introspect)] +enum EnumPrimitive{ + Left: u64, + Right: u64 +} + +#[derive(Serde, Copy, Drop, Introspect)] +enum EnumTupleMix{ + Left: (Vec2, u64, EnumCustom), + Right: (Vec2, u64, EnumCustom), +} + #[derive(Copy, Drop, Introspect)] struct Position { #[key] @@ -84,13 +96,13 @@ enum PlainEnum { } #[derive(Serde, Copy, Drop, Introspect)] -enum EnumPrimitive { +enum EnumTupleOnePrimitive { Left: (u16,), Right: (u16,), } #[derive(Serde, Copy, Drop, Introspect)] -enum EnumTuple { +enum EnumTupleSeveralPrimitive { Left: (u8, u8), Right: (u8, u8), } @@ -101,6 +113,18 @@ enum EnumCustom { Right: Vec2, } +#[derive(Serde, Copy, Drop, Introspect)] +enum EnumPrimitive{ + Left: u64, + Right: u64 +} + +#[derive(Serde, Copy, Drop, Introspect)] +enum EnumTupleMix{ + Left: (Vec2, u64, EnumCustom), + Right: (Vec2, u64, EnumCustom), +} + #[derive(Copy, Drop, Introspect)] struct Position { #[key] @@ -230,28 +254,28 @@ impl PlainEnumIntrospect<> of dojo::database::introspect::Introspect ) } } -impl EnumPrimitiveSerde of core::serde::Serde:: { - fn serialize(self: @EnumPrimitive, ref output: core::array::Array) { +impl EnumTupleOnePrimitiveSerde of core::serde::Serde:: { + fn serialize(self: @EnumTupleOnePrimitive, ref output: core::array::Array) { match self { - EnumPrimitive::Left(x) => { core::serde::Serde::serialize(@0, ref output); core::serde::Serde::serialize(x, ref output); }, - EnumPrimitive::Right(x) => { core::serde::Serde::serialize(@1, ref output); core::serde::Serde::serialize(x, ref output); }, + EnumTupleOnePrimitive::Left(x) => { core::serde::Serde::serialize(@0, ref output); core::serde::Serde::serialize(x, ref output); }, + EnumTupleOnePrimitive::Right(x) => { core::serde::Serde::serialize(@1, ref output); core::serde::Serde::serialize(x, ref output); }, } } - fn deserialize(ref serialized: core::array::Span) -> core::option::Option { + fn deserialize(ref serialized: core::array::Span) -> core::option::Option { let idx: felt252 = core::serde::Serde::deserialize(ref serialized)?; core::option::Option::Some( match idx { - 0 => EnumPrimitive::Left(core::serde::Serde::deserialize(ref serialized)?), - 1 => EnumPrimitive::Right(core::serde::Serde::deserialize(ref 
serialized)?), + 0 => EnumTupleOnePrimitive::Left(core::serde::Serde::deserialize(ref serialized)?), + 1 => EnumTupleOnePrimitive::Right(core::serde::Serde::deserialize(ref serialized)?), _ => { return core::option::Option::None; } } ) } } -impl EnumPrimitiveCopy of core::traits::Copy::; -impl EnumPrimitiveDrop of core::traits::Drop::; +impl EnumTupleOnePrimitiveCopy of core::traits::Copy::; +impl EnumTupleOnePrimitiveDrop of core::traits::Drop::; -impl EnumPrimitiveIntrospect<> of dojo::database::introspect::Introspect> { +impl EnumTupleOnePrimitiveIntrospect<> of dojo::database::introspect::Introspect> { #[inline(always)] fn size() -> usize { 2 @@ -268,48 +292,48 @@ layout.append(16); fn ty() -> dojo::database::introspect::Ty { dojo::database::introspect::Ty::Enum( dojo::database::introspect::Enum { - name: 'EnumPrimitive', + name: 'EnumTupleOnePrimitive', attrs: array![].span(), children: array![( 'Left', dojo::database::introspect::serialize_member_type( @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Ty::Primitive('u16') - )].span())) + @dojo::database::introspect::Ty::Primitive('u16') + )].span())) ), ( 'Right', dojo::database::introspect::serialize_member_type( @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Ty::Primitive('u16') - )].span())) + @dojo::database::introspect::Ty::Primitive('u16') + )].span())) )].span() } ) } } -impl EnumTupleSerde of core::serde::Serde:: { - fn serialize(self: @EnumTuple, ref output: core::array::Array) { +impl EnumTupleSeveralPrimitiveSerde of core::serde::Serde:: { + fn serialize(self: @EnumTupleSeveralPrimitive, ref output: core::array::Array) { match self { - EnumTuple::Left(x) => { core::serde::Serde::serialize(@0, ref output); core::serde::Serde::serialize(x, ref output); }, - EnumTuple::Right(x) => { core::serde::Serde::serialize(@1, ref output); core::serde::Serde::serialize(x, ref output); }, + EnumTupleSeveralPrimitive::Left(x) => { core::serde::Serde::serialize(@0, ref output); core::serde::Serde::serialize(x, ref output); }, + EnumTupleSeveralPrimitive::Right(x) => { core::serde::Serde::serialize(@1, ref output); core::serde::Serde::serialize(x, ref output); }, } } - fn deserialize(ref serialized: core::array::Span) -> core::option::Option { + fn deserialize(ref serialized: core::array::Span) -> core::option::Option { let idx: felt252 = core::serde::Serde::deserialize(ref serialized)?; core::option::Option::Some( match idx { - 0 => EnumTuple::Left(core::serde::Serde::deserialize(ref serialized)?), - 1 => EnumTuple::Right(core::serde::Serde::deserialize(ref serialized)?), + 0 => EnumTupleSeveralPrimitive::Left(core::serde::Serde::deserialize(ref serialized)?), + 1 => EnumTupleSeveralPrimitive::Right(core::serde::Serde::deserialize(ref serialized)?), _ => { return core::option::Option::None; } } ) } } -impl EnumTupleCopy of core::traits::Copy::; -impl EnumTupleDrop of core::traits::Drop::; +impl EnumTupleSeveralPrimitiveCopy of core::traits::Copy::; +impl EnumTupleSeveralPrimitiveDrop of core::traits::Drop::; -impl EnumTupleIntrospect<> of dojo::database::introspect::Introspect> { +impl EnumTupleSeveralPrimitiveIntrospect<> of dojo::database::introspect::Introspect> { #[inline(always)] fn size() -> usize { 3 @@ -327,25 +351,25 @@ layout.append(8); fn ty() -> dojo::database::introspect::Ty { dojo::database::introspect::Ty::Enum( dojo::database::introspect::Enum { - name: 'EnumTuple', + 
name: 'EnumTupleSeveralPrimitive', attrs: array![].span(), children: array![( 'Left', dojo::database::introspect::serialize_member_type( @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Ty::Primitive('u8') - ), dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Ty::Primitive('u8') - )].span())) + @dojo::database::introspect::Ty::Primitive('u8') + ), dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Primitive('u8') + )].span())) ), ( 'Right', dojo::database::introspect::serialize_member_type( @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Ty::Primitive('u8') - ), dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Ty::Primitive('u8') - )].span())) + @dojo::database::introspect::Ty::Primitive('u8') + ), dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Primitive('u8') + )].span())) )].span() } ) @@ -395,15 +419,141 @@ dojo::database::introspect::Introspect::::layout(ref layout); 'Left', dojo::database::introspect::serialize_member_type( @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Introspect::::ty() - )].span())) + @dojo::database::introspect::Introspect::::ty() + )].span())) ), ( 'Right', dojo::database::introspect::serialize_member_type( @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( - @dojo::database::introspect::Introspect::::ty() - )].span())) + @dojo::database::introspect::Introspect::::ty() + )].span())) + )].span() + } + ) + } +} +impl EnumPrimitiveSerde of core::serde::Serde:: { + fn serialize(self: @EnumPrimitive, ref output: core::array::Array) { + match self { + EnumPrimitive::Left(x) => { core::serde::Serde::serialize(@0, ref output); core::serde::Serde::serialize(x, ref output); }, + EnumPrimitive::Right(x) => { core::serde::Serde::serialize(@1, ref output); core::serde::Serde::serialize(x, ref output); }, + } + } + fn deserialize(ref serialized: core::array::Span) -> core::option::Option { + let idx: felt252 = core::serde::Serde::deserialize(ref serialized)?; + core::option::Option::Some( + match idx { + 0 => EnumPrimitive::Left(core::serde::Serde::deserialize(ref serialized)?), + 1 => EnumPrimitive::Right(core::serde::Serde::deserialize(ref serialized)?), + _ => { return core::option::Option::None; } + } + ) + } +} +impl EnumPrimitiveCopy of core::traits::Copy::; +impl EnumPrimitiveDrop of core::traits::Drop::; + +impl EnumPrimitiveIntrospect<> of dojo::database::introspect::Introspect> { + #[inline(always)] + fn size() -> usize { + 2 + } + + #[inline(always)] + fn layout(ref layout: Array) { + layout.append(8); +layout.append(64); + + } + + #[inline(always)] + fn ty() -> dojo::database::introspect::Ty { + dojo::database::introspect::Ty::Enum( + dojo::database::introspect::Enum { + name: 'EnumPrimitive', + attrs: array![].span(), + children: array![( + 'Left', + dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Primitive('u64') + )].span())) + ), +( + 'Right', + dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( + 
@dojo::database::introspect::Ty::Primitive('u64') + )].span())) + )].span() + } + ) + } +} +impl EnumTupleMixSerde of core::serde::Serde:: { + fn serialize(self: @EnumTupleMix, ref output: core::array::Array) { + match self { + EnumTupleMix::Left(x) => { core::serde::Serde::serialize(@0, ref output); core::serde::Serde::serialize(x, ref output); }, + EnumTupleMix::Right(x) => { core::serde::Serde::serialize(@1, ref output); core::serde::Serde::serialize(x, ref output); }, + } + } + fn deserialize(ref serialized: core::array::Span) -> core::option::Option { + let idx: felt252 = core::serde::Serde::deserialize(ref serialized)?; + core::option::Option::Some( + match idx { + 0 => EnumTupleMix::Left(core::serde::Serde::deserialize(ref serialized)?), + 1 => EnumTupleMix::Right(core::serde::Serde::deserialize(ref serialized)?), + _ => { return core::option::Option::None; } + } + ) + } +} +impl EnumTupleMixCopy of core::traits::Copy::; +impl EnumTupleMixDrop of core::traits::Drop::; + +impl EnumTupleMixIntrospect<> of dojo::database::introspect::Introspect> { + #[inline(always)] + fn size() -> usize { + dojo::database::introspect::Introspect::::size() + dojo::database::introspect::Introspect::::size() + 2 + } + + #[inline(always)] + fn layout(ref layout: Array) { + layout.append(8); +dojo::database::introspect::Introspect::::layout(ref layout); +layout.append(64); +dojo::database::introspect::Introspect::::layout(ref layout); + + } + + #[inline(always)] + fn ty() -> dojo::database::introspect::Ty { + dojo::database::introspect::Ty::Enum( + dojo::database::introspect::Enum { + name: 'EnumTupleMix', + attrs: array![].span(), + children: array![( + 'Left', + dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Introspect::::ty() + ), dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Primitive('u64') + ), dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Introspect::::ty() + )].span())) + ), +( + 'Right', + dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Tuple(array![dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Introspect::::ty() + ), dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Ty::Primitive('u64') + ), dojo::database::introspect::serialize_member_type( + @dojo::database::introspect::Introspect::::ty() + )].span())) )].span() } ) @@ -626,26 +776,26 @@ impl FeltsArrayBadCapacityIntrospect expected_diagnostics error: Unsupported attribute. - --> test_src/lib.cairo:49:5 + --> test_src/lib.cairo:61:5 #[capacity(10)] ^*************^ error: Capacity is only supported for Array or Span. - --> test_src/lib.cairo:55:5 + --> test_src/lib.cairo:67:5 #[capacity(10)] ^*************^ error: Unsupported attribute. - --> test_src/lib.cairo:55:5 + --> test_src/lib.cairo:67:5 #[capacity(10)] ^*************^ error: Capacity must be greater than 0. - --> test_src/lib.cairo:61:5 + --> test_src/lib.cairo:73:5 #[capacity(0)] ^************^ error: Unsupported attribute. - --> test_src/lib.cairo:61:5 + --> test_src/lib.cairo:73:5 #[capacity(0)] ^************^ diff --git a/crates/dojo-lang/src/plugin_test_data/model b/crates/dojo-lang/src/plugin_test_data/model index e2b30a016d..0339772912 100644 --- a/crates/dojo-lang/src/plugin_test_data/model +++ b/crates/dojo-lang/src/plugin_test_data/model @@ -1,9 +1,155 @@ -//! 
> Test expansion of the derive(Model). +//! > Test expansion of the dojo::model. //! > test_runner_name test_expand_plugin //! > cairo_code +#[dojo::model(version: 0)] +#[dojo::model(version: 0)] +struct BadModelMultipleAttr { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version: 0, version: 0)] +struct BadModelMultipleVersions { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version: hello)] +struct BadModelBadVersionType { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version)] +struct BadModelNoVersionValue { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(my_arg: 1)] +struct BadModelUnexpectedArgWithValue { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(my_arg)] +struct BadModelUnexpectedArg { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version: 2)] +struct BadModelNotSupportedVersion { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version: 0)] +struct Modelv0 { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model] +struct Position { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model] +struct Roles { + role_ids: Array +} + +#[dojo::model] +struct OnlyKeyModel { + #[key] + id: felt252 +} + +#[dojo::model] +struct Player { + #[key] + game: felt252, + #[key] + player: ContractAddress, + + name: felt252, +} + +#[dojo::model] +type OtherPlayer = Player; + +//! > expanded_cairo_code +#[dojo::model(version: 0)] +#[dojo::model(version: 0)] +struct BadModelMultipleAttr { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version: 0, version: 0)] +struct BadModelMultipleVersions { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version: hello)] +struct BadModelBadVersionType { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version)] +struct BadModelNoVersionValue { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(my_arg: 1)] +struct BadModelUnexpectedArgWithValue { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(my_arg)] +struct BadModelUnexpectedArg { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version: 2)] +struct BadModelNotSupportedVersion { + #[key] + id: felt252, + v: Vec3, +} + +#[dojo::model(version: 0)] +struct Modelv0 { + #[key] + id: felt252, + v: Vec3, +} + #[dojo::model] struct Position { #[key] @@ -11,61 +157,876 @@ struct Position { v: Vec3, } -#[dojo::model] -struct Roles { - role_ids: Array -} +#[dojo::model] +struct Roles { + role_ids: Array +} + +#[dojo::model] +struct OnlyKeyModel { + #[key] + id: felt252 +} + +#[dojo::model] +struct Player { + #[key] + game: felt252, + #[key] + player: ContractAddress, + + name: felt252, +} + +#[dojo::model] +type OtherPlayer = Player; + +impl BadModelMultipleVersionsModel of dojo::model::Model { + fn entity(world: dojo::world::IWorldDispatcher, keys: Span, layout: Span) -> BadModelMultipleVersions { + let values = dojo::world::IWorldDispatcherTrait::entity(world, selector!("BadModelMultipleVersions"), keys, layout); + + // TODO: Generate method to deserialize from keys / values directly to avoid + // serializing to intermediate array. + let mut serialized = core::array::ArrayTrait::new(); + core::array::serialize_array_helper(keys, ref serialized); + core::array::serialize_array_helper(values, ref serialized); + let mut serialized = core::array::ArrayTrait::span(@serialized); + + let entity = core::serde::Serde::::deserialize(ref serialized); + + if core::option::OptionTrait::::is_none(@entity) { + panic!( + "Model `BadModelMultipleVersions`: deserialization failed. 
Ensure the length of the keys tuple is matching the number of #[key] fields in the model struct." + ); + } + + core::option::OptionTrait::::unwrap(entity) + } + + #[inline(always)] + fn name(self: @BadModelMultipleVersions) -> ByteArray { + "BadModelMultipleVersions" + } + + #[inline(always)] + fn version(self: @BadModelMultipleVersions) -> u8 { + 1 + } + + #[inline(always)] + fn selector(self: @BadModelMultipleVersions) -> felt252 { + selector!("BadModelMultipleVersions") + } + + #[inline(always)] + fn keys(self: @BadModelMultipleVersions) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::array::ArrayTrait::append(ref serialized, *self.id); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn values(self: @BadModelMultipleVersions) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::serde::Serde::serialize(self.v, ref serialized); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn layout(self: @BadModelMultipleVersions) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + #[inline(always)] + fn packed_size(self: @BadModelMultipleVersions) -> usize { + let mut layout = self.layout(); + dojo::packing::calculate_packed_size(ref layout) + } +} + +#[starknet::interface] +trait Ibad_model_multiple_versions { + fn ensure_abi(self: @T, model: BadModelMultipleVersions); +} + +#[starknet::contract] +mod bad_model_multiple_versions { + use super::BadModelMultipleVersions; + use super::Ibad_model_multiple_versions; + + #[storage] + struct Storage {} + + #[abi(embed_v0)] + impl DojoModelImpl of dojo::model::IModel{ + fn selector(self: @ContractState) -> felt252 { + selector!("BadModelMultipleVersions") + } + + fn name(self: @ContractState) -> ByteArray { + "BadModelMultipleVersions" + } + + fn version(self: @ContractState) -> u8 { + 1 + } + + fn unpacked_size(self: @ContractState) -> usize { + dojo::database::introspect::Introspect::::size() + } + + fn packed_size(self: @ContractState) -> usize { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + let mut layout_span = layout.span(); + dojo::packing::calculate_packed_size(ref layout_span) + } + + fn layout(self: @ContractState) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + fn schema(self: @ContractState) -> dojo::database::introspect::Ty { + dojo::database::introspect::Introspect::::ty() + } + } + + #[abi(embed_v0)] + impl bad_model_multiple_versionsImpl of Ibad_model_multiple_versions{ + fn ensure_abi(self: @ContractState, model: BadModelMultipleVersions) { + } + } +} + +impl BadModelBadVersionTypeModel of dojo::model::Model { + fn entity(world: dojo::world::IWorldDispatcher, keys: Span, layout: Span) -> BadModelBadVersionType { + let values = dojo::world::IWorldDispatcherTrait::entity(world, selector!("BadModelBadVersionType"), keys, layout); + + // TODO: Generate method to deserialize from keys / values directly to avoid + // serializing to intermediate array. 
+ let mut serialized = core::array::ArrayTrait::new(); + core::array::serialize_array_helper(keys, ref serialized); + core::array::serialize_array_helper(values, ref serialized); + let mut serialized = core::array::ArrayTrait::span(@serialized); + + let entity = core::serde::Serde::::deserialize(ref serialized); + + if core::option::OptionTrait::::is_none(@entity) { + panic!( + "Model `BadModelBadVersionType`: deserialization failed. Ensure the length of the keys tuple is matching the number of #[key] fields in the model struct." + ); + } + + core::option::OptionTrait::::unwrap(entity) + } + + #[inline(always)] + fn name(self: @BadModelBadVersionType) -> ByteArray { + "BadModelBadVersionType" + } + + #[inline(always)] + fn version(self: @BadModelBadVersionType) -> u8 { + 1 + } + + #[inline(always)] + fn selector(self: @BadModelBadVersionType) -> felt252 { + selector!("BadModelBadVersionType") + } + + #[inline(always)] + fn keys(self: @BadModelBadVersionType) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::array::ArrayTrait::append(ref serialized, *self.id); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn values(self: @BadModelBadVersionType) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::serde::Serde::serialize(self.v, ref serialized); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn layout(self: @BadModelBadVersionType) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + #[inline(always)] + fn packed_size(self: @BadModelBadVersionType) -> usize { + let mut layout = self.layout(); + dojo::packing::calculate_packed_size(ref layout) + } +} + +#[starknet::interface] +trait Ibad_model_bad_version_type { + fn ensure_abi(self: @T, model: BadModelBadVersionType); +} + +#[starknet::contract] +mod bad_model_bad_version_type { + use super::BadModelBadVersionType; + use super::Ibad_model_bad_version_type; + + #[storage] + struct Storage {} + + #[abi(embed_v0)] + impl DojoModelImpl of dojo::model::IModel{ + fn selector(self: @ContractState) -> felt252 { + selector!("BadModelBadVersionType") + } + + fn name(self: @ContractState) -> ByteArray { + "BadModelBadVersionType" + } + + fn version(self: @ContractState) -> u8 { + 1 + } + + fn unpacked_size(self: @ContractState) -> usize { + dojo::database::introspect::Introspect::::size() + } + + fn packed_size(self: @ContractState) -> usize { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + let mut layout_span = layout.span(); + dojo::packing::calculate_packed_size(ref layout_span) + } + + fn layout(self: @ContractState) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + fn schema(self: @ContractState) -> dojo::database::introspect::Ty { + dojo::database::introspect::Introspect::::ty() + } + } + + #[abi(embed_v0)] + impl bad_model_bad_version_typeImpl of Ibad_model_bad_version_type{ + fn ensure_abi(self: @ContractState, model: BadModelBadVersionType) { + } + } +} + +impl BadModelNoVersionValueModel of dojo::model::Model { + fn entity(world: dojo::world::IWorldDispatcher, keys: Span, layout: Span) -> BadModelNoVersionValue { + let values = dojo::world::IWorldDispatcherTrait::entity(world, selector!("BadModelNoVersionValue"), keys, layout); + + // 
TODO: Generate method to deserialize from keys / values directly to avoid + // serializing to intermediate array. + let mut serialized = core::array::ArrayTrait::new(); + core::array::serialize_array_helper(keys, ref serialized); + core::array::serialize_array_helper(values, ref serialized); + let mut serialized = core::array::ArrayTrait::span(@serialized); + + let entity = core::serde::Serde::::deserialize(ref serialized); + + if core::option::OptionTrait::::is_none(@entity) { + panic!( + "Model `BadModelNoVersionValue`: deserialization failed. Ensure the length of the keys tuple is matching the number of #[key] fields in the model struct." + ); + } + + core::option::OptionTrait::::unwrap(entity) + } + + #[inline(always)] + fn name(self: @BadModelNoVersionValue) -> ByteArray { + "BadModelNoVersionValue" + } + + #[inline(always)] + fn version(self: @BadModelNoVersionValue) -> u8 { + 1 + } + + #[inline(always)] + fn selector(self: @BadModelNoVersionValue) -> felt252 { + selector!("BadModelNoVersionValue") + } + + #[inline(always)] + fn keys(self: @BadModelNoVersionValue) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::array::ArrayTrait::append(ref serialized, *self.id); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn values(self: @BadModelNoVersionValue) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::serde::Serde::serialize(self.v, ref serialized); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn layout(self: @BadModelNoVersionValue) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + #[inline(always)] + fn packed_size(self: @BadModelNoVersionValue) -> usize { + let mut layout = self.layout(); + dojo::packing::calculate_packed_size(ref layout) + } +} + +#[starknet::interface] +trait Ibad_model_no_version_value { + fn ensure_abi(self: @T, model: BadModelNoVersionValue); +} + +#[starknet::contract] +mod bad_model_no_version_value { + use super::BadModelNoVersionValue; + use super::Ibad_model_no_version_value; + + #[storage] + struct Storage {} + + #[abi(embed_v0)] + impl DojoModelImpl of dojo::model::IModel{ + fn selector(self: @ContractState) -> felt252 { + selector!("BadModelNoVersionValue") + } + + fn name(self: @ContractState) -> ByteArray { + "BadModelNoVersionValue" + } + + fn version(self: @ContractState) -> u8 { + 1 + } + + fn unpacked_size(self: @ContractState) -> usize { + dojo::database::introspect::Introspect::::size() + } + + fn packed_size(self: @ContractState) -> usize { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + let mut layout_span = layout.span(); + dojo::packing::calculate_packed_size(ref layout_span) + } + + fn layout(self: @ContractState) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + fn schema(self: @ContractState) -> dojo::database::introspect::Ty { + dojo::database::introspect::Introspect::::ty() + } + } + + #[abi(embed_v0)] + impl bad_model_no_version_valueImpl of Ibad_model_no_version_value{ + fn ensure_abi(self: @ContractState, model: BadModelNoVersionValue) { + } + } +} + +impl BadModelUnexpectedArgWithValueModel of dojo::model::Model { + fn entity(world: dojo::world::IWorldDispatcher, keys: Span, layout: Span) -> BadModelUnexpectedArgWithValue 
{ + let values = dojo::world::IWorldDispatcherTrait::entity(world, selector!("BadModelUnexpectedArgWithValue"), keys, layout); + + // TODO: Generate method to deserialize from keys / values directly to avoid + // serializing to intermediate array. + let mut serialized = core::array::ArrayTrait::new(); + core::array::serialize_array_helper(keys, ref serialized); + core::array::serialize_array_helper(values, ref serialized); + let mut serialized = core::array::ArrayTrait::span(@serialized); + + let entity = core::serde::Serde::::deserialize(ref serialized); + + if core::option::OptionTrait::::is_none(@entity) { + panic!( + "Model `BadModelUnexpectedArgWithValue`: deserialization failed. Ensure the length of the keys tuple is matching the number of #[key] fields in the model struct." + ); + } + + core::option::OptionTrait::::unwrap(entity) + } + + #[inline(always)] + fn name(self: @BadModelUnexpectedArgWithValue) -> ByteArray { + "BadModelUnexpectedArgWithValue" + } + + #[inline(always)] + fn version(self: @BadModelUnexpectedArgWithValue) -> u8 { + 1 + } + + #[inline(always)] + fn selector(self: @BadModelUnexpectedArgWithValue) -> felt252 { + selector!("BadModelUnexpectedArgWithValue") + } + + #[inline(always)] + fn keys(self: @BadModelUnexpectedArgWithValue) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::array::ArrayTrait::append(ref serialized, *self.id); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn values(self: @BadModelUnexpectedArgWithValue) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::serde::Serde::serialize(self.v, ref serialized); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn layout(self: @BadModelUnexpectedArgWithValue) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + #[inline(always)] + fn packed_size(self: @BadModelUnexpectedArgWithValue) -> usize { + let mut layout = self.layout(); + dojo::packing::calculate_packed_size(ref layout) + } +} + +#[starknet::interface] +trait Ibad_model_unexpected_arg_with_value { + fn ensure_abi(self: @T, model: BadModelUnexpectedArgWithValue); +} + +#[starknet::contract] +mod bad_model_unexpected_arg_with_value { + use super::BadModelUnexpectedArgWithValue; + use super::Ibad_model_unexpected_arg_with_value; + + #[storage] + struct Storage {} + + #[abi(embed_v0)] + impl DojoModelImpl of dojo::model::IModel{ + fn selector(self: @ContractState) -> felt252 { + selector!("BadModelUnexpectedArgWithValue") + } + + fn name(self: @ContractState) -> ByteArray { + "BadModelUnexpectedArgWithValue" + } + + fn version(self: @ContractState) -> u8 { + 1 + } + + fn unpacked_size(self: @ContractState) -> usize { + dojo::database::introspect::Introspect::::size() + } + + fn packed_size(self: @ContractState) -> usize { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + let mut layout_span = layout.span(); + dojo::packing::calculate_packed_size(ref layout_span) + } + + fn layout(self: @ContractState) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + fn schema(self: @ContractState) -> dojo::database::introspect::Ty { + dojo::database::introspect::Introspect::::ty() + } + } + + #[abi(embed_v0)] + impl bad_model_unexpected_arg_with_valueImpl of 
Ibad_model_unexpected_arg_with_value{ + fn ensure_abi(self: @ContractState, model: BadModelUnexpectedArgWithValue) { + } + } +} + +impl BadModelUnexpectedArgModel of dojo::model::Model { + fn entity(world: dojo::world::IWorldDispatcher, keys: Span, layout: Span) -> BadModelUnexpectedArg { + let values = dojo::world::IWorldDispatcherTrait::entity(world, selector!("BadModelUnexpectedArg"), keys, layout); + + // TODO: Generate method to deserialize from keys / values directly to avoid + // serializing to intermediate array. + let mut serialized = core::array::ArrayTrait::new(); + core::array::serialize_array_helper(keys, ref serialized); + core::array::serialize_array_helper(values, ref serialized); + let mut serialized = core::array::ArrayTrait::span(@serialized); + + let entity = core::serde::Serde::::deserialize(ref serialized); + + if core::option::OptionTrait::::is_none(@entity) { + panic!( + "Model `BadModelUnexpectedArg`: deserialization failed. Ensure the length of the keys tuple is matching the number of #[key] fields in the model struct." + ); + } + + core::option::OptionTrait::::unwrap(entity) + } + + #[inline(always)] + fn name(self: @BadModelUnexpectedArg) -> ByteArray { + "BadModelUnexpectedArg" + } + + #[inline(always)] + fn version(self: @BadModelUnexpectedArg) -> u8 { + 1 + } + + #[inline(always)] + fn selector(self: @BadModelUnexpectedArg) -> felt252 { + selector!("BadModelUnexpectedArg") + } + + #[inline(always)] + fn keys(self: @BadModelUnexpectedArg) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::array::ArrayTrait::append(ref serialized, *self.id); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn values(self: @BadModelUnexpectedArg) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::serde::Serde::serialize(self.v, ref serialized); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn layout(self: @BadModelUnexpectedArg) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + #[inline(always)] + fn packed_size(self: @BadModelUnexpectedArg) -> usize { + let mut layout = self.layout(); + dojo::packing::calculate_packed_size(ref layout) + } +} + +#[starknet::interface] +trait Ibad_model_unexpected_arg { + fn ensure_abi(self: @T, model: BadModelUnexpectedArg); +} + +#[starknet::contract] +mod bad_model_unexpected_arg { + use super::BadModelUnexpectedArg; + use super::Ibad_model_unexpected_arg; + + #[storage] + struct Storage {} + + #[abi(embed_v0)] + impl DojoModelImpl of dojo::model::IModel{ + fn selector(self: @ContractState) -> felt252 { + selector!("BadModelUnexpectedArg") + } + + fn name(self: @ContractState) -> ByteArray { + "BadModelUnexpectedArg" + } + + fn version(self: @ContractState) -> u8 { + 1 + } + + fn unpacked_size(self: @ContractState) -> usize { + dojo::database::introspect::Introspect::::size() + } + + fn packed_size(self: @ContractState) -> usize { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + let mut layout_span = layout.span(); + dojo::packing::calculate_packed_size(ref layout_span) + } + + fn layout(self: @ContractState) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + fn schema(self: @ContractState) -> dojo::database::introspect::Ty { + 
dojo::database::introspect::Introspect::::ty() + } + } + + #[abi(embed_v0)] + impl bad_model_unexpected_argImpl of Ibad_model_unexpected_arg{ + fn ensure_abi(self: @ContractState, model: BadModelUnexpectedArg) { + } + } +} + +impl BadModelNotSupportedVersionModel of dojo::model::Model { + fn entity(world: dojo::world::IWorldDispatcher, keys: Span, layout: Span) -> BadModelNotSupportedVersion { + let values = dojo::world::IWorldDispatcherTrait::entity(world, selector!("BadModelNotSupportedVersion"), keys, layout); + + // TODO: Generate method to deserialize from keys / values directly to avoid + // serializing to intermediate array. + let mut serialized = core::array::ArrayTrait::new(); + core::array::serialize_array_helper(keys, ref serialized); + core::array::serialize_array_helper(values, ref serialized); + let mut serialized = core::array::ArrayTrait::span(@serialized); + + let entity = core::serde::Serde::::deserialize(ref serialized); + + if core::option::OptionTrait::::is_none(@entity) { + panic!( + "Model `BadModelNotSupportedVersion`: deserialization failed. Ensure the length of the keys tuple is matching the number of #[key] fields in the model struct." + ); + } + + core::option::OptionTrait::::unwrap(entity) + } + + #[inline(always)] + fn name(self: @BadModelNotSupportedVersion) -> ByteArray { + "BadModelNotSupportedVersion" + } + + #[inline(always)] + fn version(self: @BadModelNotSupportedVersion) -> u8 { + 1 + } + + #[inline(always)] + fn selector(self: @BadModelNotSupportedVersion) -> felt252 { + selector!("BadModelNotSupportedVersion") + } + + #[inline(always)] + fn keys(self: @BadModelNotSupportedVersion) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::array::ArrayTrait::append(ref serialized, *self.id); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn values(self: @BadModelNotSupportedVersion) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::serde::Serde::serialize(self.v, ref serialized); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn layout(self: @BadModelNotSupportedVersion) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + #[inline(always)] + fn packed_size(self: @BadModelNotSupportedVersion) -> usize { + let mut layout = self.layout(); + dojo::packing::calculate_packed_size(ref layout) + } +} + +#[starknet::interface] +trait Ibad_model_not_supported_version { + fn ensure_abi(self: @T, model: BadModelNotSupportedVersion); +} + +#[starknet::contract] +mod bad_model_not_supported_version { + use super::BadModelNotSupportedVersion; + use super::Ibad_model_not_supported_version; + + #[storage] + struct Storage {} + + #[abi(embed_v0)] + impl DojoModelImpl of dojo::model::IModel{ + fn selector(self: @ContractState) -> felt252 { + selector!("BadModelNotSupportedVersion") + } + + fn name(self: @ContractState) -> ByteArray { + "BadModelNotSupportedVersion" + } + + fn version(self: @ContractState) -> u8 { + 1 + } + + fn unpacked_size(self: @ContractState) -> usize { + dojo::database::introspect::Introspect::::size() + } -#[dojo::model] -struct OnlyKeyModel { - #[key] - id: felt252 -} + fn packed_size(self: @ContractState) -> usize { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + let mut layout_span = layout.span(); + dojo::packing::calculate_packed_size(ref layout_span) + } 
-#[dojo::model] -struct Player { - #[key] - game: felt252, - #[key] - player: ContractAddress, + fn layout(self: @ContractState) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } - name: felt252, + fn schema(self: @ContractState) -> dojo::database::introspect::Ty { + dojo::database::introspect::Introspect::::ty() + } + } + + #[abi(embed_v0)] + impl bad_model_not_supported_versionImpl of Ibad_model_not_supported_version{ + fn ensure_abi(self: @ContractState, model: BadModelNotSupportedVersion) { + } + } } -#[dojo::model] -type OtherPlayer = Player; +impl Modelv0Model of dojo::model::Model { + fn entity(world: dojo::world::IWorldDispatcher, keys: Span, layout: Span) -> Modelv0 { + let values = dojo::world::IWorldDispatcherTrait::entity(world, selector!("Modelv0"), keys, layout); -//! > expanded_cairo_code -#[dojo::model] -struct Position { - #[key] - id: felt252, - v: Vec3, -} + // TODO: Generate method to deserialize from keys / values directly to avoid + // serializing to intermediate array. + let mut serialized = core::array::ArrayTrait::new(); + core::array::serialize_array_helper(keys, ref serialized); + core::array::serialize_array_helper(values, ref serialized); + let mut serialized = core::array::ArrayTrait::span(@serialized); -#[dojo::model] -struct Roles { - role_ids: Array + let entity = core::serde::Serde::::deserialize(ref serialized); + + if core::option::OptionTrait::::is_none(@entity) { + panic!( + "Model `Modelv0`: deserialization failed. Ensure the length of the keys tuple is matching the number of #[key] fields in the model struct." + ); + } + + core::option::OptionTrait::::unwrap(entity) + } + + #[inline(always)] + fn name(self: @Modelv0) -> ByteArray { + "Modelv0" + } + + #[inline(always)] + fn version(self: @Modelv0) -> u8 { + 0 + } + + #[inline(always)] + fn selector(self: @Modelv0) -> felt252 { + "Modelv0" + } + + #[inline(always)] + fn keys(self: @Modelv0) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::array::ArrayTrait::append(ref serialized, *self.id); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn values(self: @Modelv0) -> Span { + let mut serialized = core::array::ArrayTrait::new(); + core::serde::Serde::serialize(self.v, ref serialized); + core::array::ArrayTrait::span(@serialized) + } + + #[inline(always)] + fn layout(self: @Modelv0) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + #[inline(always)] + fn packed_size(self: @Modelv0) -> usize { + let mut layout = self.layout(); + dojo::packing::calculate_packed_size(ref layout) + } } -#[dojo::model] -struct OnlyKeyModel { - #[key] - id: felt252 +#[starknet::interface] +trait Imodelv_0 { + fn ensure_abi(self: @T, model: Modelv0); } -#[dojo::model] -struct Player { - #[key] - game: felt252, - #[key] - player: ContractAddress, +#[starknet::contract] +mod modelv_0 { + use super::Modelv0; + use super::Imodelv_0; - name: felt252, -} + #[storage] + struct Storage {} -#[dojo::model] -type OtherPlayer = Player; + #[abi(embed_v0)] + impl DojoModelImpl of dojo::model::IModel{ + fn selector(self: @ContractState) -> felt252 { + "Modelv0" + } + + fn name(self: @ContractState) -> ByteArray { + "Modelv0" + } + + fn version(self: @ContractState) -> u8 { + 0 + } + + fn unpacked_size(self: @ContractState) -> usize { + 
dojo::database::introspect::Introspect::::size() + } + + fn packed_size(self: @ContractState) -> usize { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + let mut layout_span = layout.span(); + dojo::packing::calculate_packed_size(ref layout_span) + } + + fn layout(self: @ContractState) -> Span { + let mut layout = core::array::ArrayTrait::new(); + dojo::database::introspect::Introspect::::layout(ref layout); + core::array::ArrayTrait::span(@layout) + } + + fn schema(self: @ContractState) -> dojo::database::introspect::Ty { + dojo::database::introspect::Introspect::::ty() + } + } + + #[abi(embed_v0)] + impl modelv_0Impl of Imodelv_0{ + fn ensure_abi(self: @ContractState, model: Modelv0) { + } + } +} impl PositionModel of dojo::model::Model { fn entity(world: dojo::world::IWorldDispatcher, keys: Span, layout: Span) -> Position { @@ -552,16 +1513,86 @@ mod player { } //! > expected_diagnostics +error: A Dojo model must have zero or one dojo::model attribute. + --> test_src/lib.cairo:1:1 +#[dojo::model(version: 0)] +^************************^ + +error: Too many 'version' attributes for dojo::model + --> test_src/lib.cairo:9:1 +#[dojo::model(version: 0, version: 0)] +^************************************^ + +error: The argument 'version' of dojo::model must be an integer + --> test_src/lib.cairo:16:1 +#[dojo::model(version: hello)] +^****************************^ + +error: Unexpected argument 'version' for dojo::model + --> test_src/lib.cairo:23:15 +#[dojo::model(version)] + ^*****^ + +error: Unexpected argument 'my_arg' for dojo::model + --> test_src/lib.cairo:30:15 +#[dojo::model(my_arg: 1)] + ^*******^ + +error: Unexpected argument 'my_arg' for dojo::model + --> test_src/lib.cairo:37:15 +#[dojo::model(my_arg)] + ^****^ + +error: dojo::model version 2 not supported + --> test_src/lib.cairo:44:24 +#[dojo::model(version: 2)] + ^ + error: Model must define at least one #[key] attribute - --> test_src/lib.cairo:9:8 + --> test_src/lib.cairo:66:8 struct Roles { ^***^ error: Model must define at least one member that is not a key - --> test_src/lib.cairo:14:8 + --> test_src/lib.cairo:71:8 struct OnlyKeyModel { ^**********^ +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelMultipleVersions]:72:1 +#[starknet::contract] +^*******************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelBadVersionType]:72:1 +#[starknet::contract] +^*******************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelNoVersionValue]:72:1 +#[starknet::contract] +^*******************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelUnexpectedArgWithValue]:72:1 +#[starknet::contract] +^*******************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelUnexpectedArg]:72:1 +#[starknet::contract] +^*******************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelNotSupportedVersion]:72:1 +#[starknet::contract] +^*******************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[Modelv0]:72:1 +#[starknet::contract] +^*******************^ + error: Unsupported attribute. --> test_src/lib.cairo[Position]:72:1 #[starknet::contract] @@ -582,6 +1613,111 @@ error: Unsupported attribute. #[starknet::contract] ^*******************^ +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelMultipleVersions]:77:5 + #[storage] + ^********^ + +error: Unsupported attribute. 
+ --> test_src/lib.cairo[BadModelMultipleVersions]:80:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelMultipleVersions]:116:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelBadVersionType]:77:5 + #[storage] + ^********^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelBadVersionType]:80:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelBadVersionType]:116:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelNoVersionValue]:77:5 + #[storage] + ^********^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelNoVersionValue]:80:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelNoVersionValue]:116:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelUnexpectedArgWithValue]:77:5 + #[storage] + ^********^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelUnexpectedArgWithValue]:80:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelUnexpectedArgWithValue]:116:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelUnexpectedArg]:77:5 + #[storage] + ^********^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelUnexpectedArg]:80:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelUnexpectedArg]:116:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelNotSupportedVersion]:77:5 + #[storage] + ^********^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelNotSupportedVersion]:80:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[BadModelNotSupportedVersion]:116:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[Modelv0]:77:5 + #[storage] + ^********^ + +error: Unsupported attribute. + --> test_src/lib.cairo[Modelv0]:80:5 + #[abi(embed_v0)] + ^**************^ + +error: Unsupported attribute. + --> test_src/lib.cairo[Modelv0]:116:5 + #[abi(embed_v0)] + ^**************^ + error: Unsupported attribute. 
--> test_src/lib.cairo[Position]:77:5 #[storage] diff --git a/crates/dojo-world/src/contracts/model.rs b/crates/dojo-world/src/contracts/model.rs index f99e34a99c..d882a90fa7 100644 --- a/crates/dojo-world/src/contracts/model.rs +++ b/crates/dojo-world/src/contracts/model.rs @@ -1,10 +1,10 @@ pub use abigen::model::ModelContractReader; use async_trait::async_trait; -use cainome::cairo_serde::Error as CainomeError; +use cainome::cairo_serde::{ContractAddress, Error as CainomeError}; use dojo_types::packing::{parse_ty, unpack, PackingError, ParseError}; use dojo_types::primitive::PrimitiveError; use dojo_types::schema::Ty; -use starknet::core::types::{FieldElement, StarknetError}; +use starknet::core::types::FieldElement; use starknet::core::utils::{ get_selector_from_name, CairoShortStringToFeltError, NonAsciiNameError, ParseCairoShortStringError, @@ -50,6 +50,7 @@ pub enum ModelError { #[cfg_attr(not(target_arch = "wasm32"), async_trait)] #[cfg_attr(target_arch = "wasm32", async_trait(?Send))] pub trait ModelReader { + fn name(&self) -> String; fn class_hash(&self) -> FieldElement; fn contract_address(&self) -> FieldElement; async fn schema(&self) -> Result; @@ -82,12 +83,13 @@ where let name = get_selector_from_name(name)?; let (class_hash, contract_address) = - world.model(&name).block_id(world.block_id).call().await.map_err(|err| match err { - CainomeError::Provider(ProviderError::StarknetError( - StarknetError::ContractNotFound, - )) => ModelError::ModelNotFound, - err => err.into(), - })?; + world.model(&name).block_id(world.block_id).call().await?; + + // World Cairo contract won't raise an error in case of unknown/unregistered + // model so raise an error here in case of zero address. + if contract_address == ContractAddress(FieldElement::ZERO) { + return Err(ModelError::ModelNotFound); + } let model_reader = ModelContractReader::new(contract_address.into(), world.provider()); @@ -149,6 +151,10 @@ impl<'a, P> ModelReader for ModelRPCReader<'a, P> where P: Provider + Sync + Send, { + fn name(&self) -> String { + self.name.to_string() + } + fn class_hash(&self) -> FieldElement { self.class_hash } diff --git a/crates/dojo-world/src/manifest.rs b/crates/dojo-world/src/manifest.rs index d04c70c205..5d1ee446e9 100644 --- a/crates/dojo-world/src/manifest.rs +++ b/crates/dojo-world/src/manifest.rs @@ -51,6 +51,8 @@ pub enum AbstractManifestError { #[error(transparent)] Model(#[from] ModelError), #[error(transparent)] + TOML(#[from] toml::de::Error), + #[error(transparent)] IO(#[from] io::Error), } @@ -161,6 +163,11 @@ pub struct Contract { pub abi: Option, #[serde_as(as = "Option")] pub address: Option, + #[serde_as(as = "Option")] + pub transaction_hash: Option, + pub block_number: Option, + // used by World contract + pub seed: Option, } #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] @@ -174,15 +181,19 @@ pub struct BaseManifest { impl From> for Manifest { fn from(value: Manifest) -> Self { Manifest::new( - Contract { class_hash: value.inner.class_hash, abi: value.inner.abi, address: None }, + Contract { + class_hash: value.inner.class_hash, + abi: value.inner.abi, + ..Default::default() + }, value.name, ) } } -impl From for DeployedManifest { +impl From for DeploymentManifest { fn from(value: BaseManifest) -> Self { - DeployedManifest { + DeploymentManifest { world: value.world.into(), base: value.base, contracts: value.contracts, @@ -192,7 +203,7 @@ impl From for DeployedManifest { } #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] -pub struct DeployedManifest { 
+pub struct DeploymentManifest { pub world: Manifest, pub base: Manifest, pub contracts: Vec>, @@ -242,11 +253,8 @@ impl BaseManifest { let contract_dir = path.join("contracts"); let model_dir = path.join("models"); - let world: Manifest = - toml::from_str(&fs::read_to_string(path.join("world.toml"))?).unwrap(); - let base: Manifest = - toml::from_str(&fs::read_to_string(path.join("base.toml"))?).unwrap(); - + let world: Manifest = toml::from_str(&fs::read_to_string(path.join("world.toml"))?)?; + let base: Manifest = toml::from_str(&fs::read_to_string(path.join("base.toml"))?)?; let contracts = elements_from_path::(&contract_dir)?; let models = elements_from_path::(&model_dir)?; @@ -279,13 +287,19 @@ impl OverlayManifest { } } -impl DeployedManifest { +impl DeploymentManifest { pub fn load_from_path(path: &Utf8PathBuf) -> Result { let manifest: Self = toml::from_str(&fs::read_to_string(path)?).unwrap(); Ok(manifest) } + pub fn merge_from_previous(&mut self, previous: DeploymentManifest) { + self.world.inner.transaction_hash = previous.world.inner.transaction_hash; + self.world.inner.block_number = previous.world.inner.block_number; + self.world.inner.seed = previous.world.inner.seed; + } + pub fn write_to_path(&self, path: &Utf8PathBuf) -> Result<()> { fs::create_dir_all(path.parent().unwrap())?; @@ -324,11 +338,15 @@ impl DeployedManifest { let (models, contracts) = get_remote_models_and_contracts(world_address, &world.provider()).await?; - Ok(DeployedManifest { + Ok(DeploymentManifest { models, contracts, world: Manifest::new( - Contract { address: Some(world_address), class_hash: world_class_hash, abi: None }, + Contract { + address: Some(world_address), + class_hash: world_class_hash, + ..Default::default() + }, WORLD_CONTRACT_NAME.into(), ), base: Manifest::new( @@ -346,11 +364,11 @@ impl DeployedManifest { // async fn load_from_remote( // provider: P, // world_address: FieldElement, -// ) -> Result; +// ) -> Result; // } // #[async_trait] -// impl RemoteLoadable

<P> for DeployedManifest {}
+// impl RemoteLoadable<P>
for DeploymentManifest {} async fn get_remote_models_and_contracts( world: FieldElement, @@ -517,12 +535,15 @@ fn parse_models_events(events: Vec) -> Vec> { let mut models: HashMap = HashMap::with_capacity(events.len()); for e in events { - let model_event = if let WorldEvent::ModelRegistered(m) = - e.try_into().expect("ModelRegistered event is expected to be parseable") - { - m - } else { - panic!("ModelRegistered expected"); + let model_event = match e.try_into() { + Ok(WorldEvent::ModelRegistered(mr)) => mr, + Ok(_) => panic!("ModelRegistered expected as already filtered"), + Err(_) => { + // As models were registered with the new event type, we can + // skip old ones. We are sure at least 1 new event was emitted + // when models were migrated. + continue; + } }; // TODO: Safely unwrap? @@ -566,11 +587,18 @@ where { let mut elements = vec![]; - for entry in path.read_dir()? { - let entry = entry?; - let path = entry.path(); + let mut entries = path + .read_dir()? + .map(|entry| entry.map(|e| e.path())) + .collect::, io::Error>>()?; + + // `read_dir` doesn't guarantee any order, so we sort the entries ourself. + // see: https://doc.rust-lang.org/std/fs/fn.read_dir.html#platform-specific-behavior + entries.sort(); + + for path in entries { if path.is_file() { - let manifest: Manifest = toml::from_str(&fs::read_to_string(path)?).unwrap(); + let manifest: Manifest = toml::from_str(&fs::read_to_string(path)?)?; elements.push(manifest); } else { continue; @@ -590,7 +618,7 @@ where let entry = entry?; let path = entry.path(); if path.is_file() { - let manifest: T = toml::from_str(&fs::read_to_string(path)?).unwrap(); + let manifest: T = toml::from_str(&fs::read_to_string(path)?)?; elements.push(manifest); } else { continue; diff --git a/crates/dojo-world/src/manifest_test.rs b/crates/dojo-world/src/manifest_test.rs index e19873a2c8..f72c3d48c5 100644 --- a/crates/dojo-world/src/manifest_test.rs +++ b/crates/dojo-world/src/manifest_test.rs @@ -13,7 +13,7 @@ use starknet::providers::jsonrpc::{JsonRpcClient, JsonRpcMethod}; use super::{parse_contracts_events, BaseManifest, DojoContract, DojoModel}; use crate::contracts::world::test::deploy_world; -use crate::manifest::{parse_models_events, AbstractManifestError, DeployedManifest, Manifest}; +use crate::manifest::{parse_models_events, AbstractManifestError, DeploymentManifest, Manifest}; use crate::migration::world::WorldDiff; #[tokio::test] @@ -32,7 +32,7 @@ async fn manifest_from_remote_throw_error_on_not_deployed() { ); let rpc = JsonRpcClient::new(mock_transport); - let err = DeployedManifest::load_from_remote(rpc, FieldElement::ONE).await.unwrap_err(); + let err = DeploymentManifest::load_from_remote(rpc, FieldElement::ONE).await.unwrap_err(); match err { AbstractManifestError::RemoteWorldNotFound => { @@ -376,12 +376,12 @@ async fn fetch_remote_manifest() { let local_manifest = BaseManifest::load_from_path(&manifest_path.join(MANIFESTS_DIR).join(BASE_DIR)).unwrap(); let remote_manifest = - DeployedManifest::load_from_remote(provider, world_address).await.unwrap(); + DeploymentManifest::load_from_remote(provider, world_address).await.unwrap(); - assert_eq!(local_manifest.models.len(), 2); + assert_eq!(local_manifest.models.len(), 3); assert_eq!(local_manifest.contracts.len(), 1); - assert_eq!(remote_manifest.models.len(), 2); + assert_eq!(remote_manifest.models.len(), 3); assert_eq!(remote_manifest.contracts.len(), 1); // compute diff from local and remote manifest diff --git a/crates/dojo-world/src/migration/mod.rs 
b/crates/dojo-world/src/migration/mod.rs index 38f4fb6953..bafb4d25dc 100644 --- a/crates/dojo-world/src/migration/mod.rs +++ b/crates/dojo-world/src/migration/mod.rs @@ -10,7 +10,7 @@ use starknet::accounts::{Account, AccountError, Call, ConnectedAccount, SingleOw use starknet::core::types::contract::{CompiledClass, SierraClass}; use starknet::core::types::{ BlockId, BlockTag, DeclareTransactionResult, FieldElement, FlattenedSierraClass, FunctionCall, - InvokeTransactionResult, StarknetError, + InvokeTransactionResult, MaybePendingTransactionReceipt, StarknetError, TransactionReceipt, }; use starknet::core::utils::{ get_contract_address, get_selector_from_name, CairoShortStringToFeltError, @@ -32,6 +32,7 @@ pub type DeclareOutput = DeclareTransactionResult; #[derive(Clone, Debug)] pub struct DeployOutput { pub transaction_hash: FieldElement, + pub block_number: Option, pub contract_address: FieldElement, pub declare: Option, } @@ -58,6 +59,8 @@ pub enum MigrationError { Provider(#[from] ProviderError), #[error(transparent)] WaitingError(#[from] TransactionWaitingError), + #[error(transparent)] + ArtifactError(#[from] anyhow::Error), } /// Represents the type of migration that should be performed. @@ -82,6 +85,8 @@ pub struct TxConfig { /// The multiplier for how much the actual transaction max fee should be relative to the /// estimated fee. If `None` is provided, the multiplier is set to `1.1`. pub fee_estimate_multiplier: Option, + pub wait: bool, + pub receipt: bool, } #[cfg_attr(not(target_arch = "wasm32"), async_trait)] @@ -97,7 +102,7 @@ pub trait Declarable { S: Signer + Sync + Send, { let (flattened_class, casm_class_hash) = - prepare_contract_declaration_params(self.artifact_path()).unwrap(); + prepare_contract_declaration_params(self.artifact_path())?; match account .provider() @@ -111,7 +116,7 @@ pub trait Declarable { let mut txn = account.declare(Arc::new(flattened_class), casm_class_hash); - if let TxConfig { fee_estimate_multiplier: Some(multiplier) } = txn_config { + if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. } = txn_config { txn = txn.fee_estimate_multiplier(multiplier); } @@ -145,10 +150,7 @@ pub trait Deployable: Declarable + Sync { let declare = match self.declare(account, txn_config).await { Ok(res) => Some(res), Err(MigrationError::ClassAlreadyDeclared) => None, - Err(e) => { - println!("{:?}", e); - return Err(e); - } + Err(e) => return Err(e), }; let base_class_hash = account @@ -193,16 +195,17 @@ pub trait Deployable: Declarable + Sync { let mut txn = account.execute(vec![call]); - if let TxConfig { fee_estimate_multiplier: Some(multiplier) } = txn_config { + if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. 
} = txn_config { txn = txn.fee_estimate_multiplier(multiplier); } let InvokeTransactionResult { transaction_hash } = txn.send().await.map_err(MigrationError::Migrator)?; - TransactionWaiter::new(transaction_hash, account.provider()).await?; + let receipt = TransactionWaiter::new(transaction_hash, account.provider()).await?; + let block_number = get_block_number_from_receipt(receipt); - Ok(DeployOutput { transaction_hash, contract_address, declare }) + Ok(DeployOutput { transaction_hash, block_number, contract_address, declare }) } async fn deploy( @@ -257,16 +260,17 @@ pub trait Deployable: Declarable + Sync { to: felt!("0x41a78e741e5af2fec34b695679bc6891742439f7afb8484ecd7766661ad02bf"), }]); - if let TxConfig { fee_estimate_multiplier: Some(multiplier) } = txn_config { + if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. } = txn_config { txn = txn.fee_estimate_multiplier(multiplier); } let InvokeTransactionResult { transaction_hash } = txn.send().await.map_err(MigrationError::Migrator)?; - TransactionWaiter::new(transaction_hash, account.provider()).await?; + let receipt = TransactionWaiter::new(transaction_hash, account.provider()).await?; + let block_number = get_block_number_from_receipt(receipt); - Ok(DeployOutput { transaction_hash, contract_address, declare }) + Ok(DeployOutput { transaction_hash, block_number, contract_address, declare }) } fn salt(&self) -> FieldElement; @@ -298,3 +302,14 @@ fn get_compiled_class_hash(artifact_path: &PathBuf) -> Result { let compiled_class: CompiledClass = serde_json::from_str(&res)?; Ok(compiled_class.class_hash()?) } + +fn get_block_number_from_receipt(receipt: MaybePendingTransactionReceipt) -> Option { + match receipt { + MaybePendingTransactionReceipt::Receipt(receipt) => match receipt { + TransactionReceipt::Deploy(r) => Some(r.block_number), + TransactionReceipt::Invoke(r) => Some(r.block_number), + _ => None, + }, + MaybePendingTransactionReceipt::PendingReceipt(_receipt) => None, + } +} diff --git a/crates/dojo-world/src/migration/strategy.rs b/crates/dojo-world/src/migration/strategy.rs index 9ce9cf05dd..0873e962de 100644 --- a/crates/dojo-world/src/migration/strategy.rs +++ b/crates/dojo-world/src/migration/strategy.rs @@ -11,14 +11,7 @@ use starknet_crypto::{poseidon_hash_many, poseidon_hash_single}; use super::class::{ClassDiff, ClassMigration}; use super::contract::{ContractDiff, ContractMigration}; use super::world::WorldDiff; -use super::{DeployOutput, MigrationType, RegisterOutput}; - -#[derive(Debug)] -pub struct MigrationOutput { - pub world: Option, - pub contracts: Vec, - pub models: Option, -} +use super::MigrationType; #[derive(Debug)] pub struct MigrationStrategy { diff --git a/crates/dojo-world/src/migration/world.rs b/crates/dojo-world/src/migration/world.rs index 11e6ae8c77..417258fb50 100644 --- a/crates/dojo-world/src/migration/world.rs +++ b/crates/dojo-world/src/migration/world.rs @@ -6,7 +6,7 @@ use super::class::ClassDiff; use super::contract::ContractDiff; use super::StateDiff; use crate::manifest::{ - BaseManifest, DeployedManifest, ManifestMethods, BASE_CONTRACT_NAME, WORLD_CONTRACT_NAME, + BaseManifest, DeploymentManifest, ManifestMethods, BASE_CONTRACT_NAME, WORLD_CONTRACT_NAME, }; #[cfg(test)] @@ -23,7 +23,7 @@ pub struct WorldDiff { } impl WorldDiff { - pub fn compute(local: BaseManifest, remote: Option) -> WorldDiff { + pub fn compute(local: BaseManifest, remote: Option) -> WorldDiff { let models = local .models .iter() diff --git a/crates/dojo-world/src/migration/world_test.rs 
b/crates/dojo-world/src/migration/world_test.rs index cd858bbc1c..be50a4044c 100644 --- a/crates/dojo-world/src/migration/world_test.rs +++ b/crates/dojo-world/src/migration/world_test.rs @@ -28,7 +28,7 @@ fn no_diff_when_local_and_remote_are_equal() { let local = BaseManifest { models, world: world_contract, base: base_contract, contracts: vec![] }; - let mut remote: DeployedManifest = local.clone().into(); + let mut remote: DeploymentManifest = local.clone().into(); remote.models = remote_models; let diff = WorldDiff::compute(local, Some(remote)); @@ -93,7 +93,7 @@ fn diff_when_local_and_remote_are_different() { let local = BaseManifest { models, contracts, world: world_contract, base: base_contract }; - let mut remote: DeployedManifest = local.clone().into(); + let mut remote: DeploymentManifest = local.clone().into(); remote.models = remote_models; remote.world.inner.class_hash = 44_u32.into(); remote.models[1].inner.class_hash = 33_u32.into(); diff --git a/crates/dojo-world/src/utils.rs b/crates/dojo-world/src/utils.rs index ec7878e88f..83d3101daf 100644 --- a/crates/dojo-world/src/utils.rs +++ b/crates/dojo-world/src/utils.rs @@ -273,6 +273,18 @@ where } } +#[inline] +pub fn execution_status_from_maybe_pending_receipt( + receipt: &MaybePendingTransactionReceipt, +) -> &ExecutionResult { + match &receipt { + MaybePendingTransactionReceipt::PendingReceipt(r) => { + execution_status_from_pending_receipt(r) + } + MaybePendingTransactionReceipt::Receipt(r) => execution_status_from_receipt(r), + } +} + #[inline] fn execution_status_from_receipt(receipt: &TransactionReceipt) -> &ExecutionResult { match receipt { diff --git a/crates/katana/core/src/sequencer.rs b/crates/katana/core/src/sequencer.rs index 76fd67d355..05ca68dd09 100644 --- a/crates/katana/core/src/sequencer.rs +++ b/crates/katana/core/src/sequencer.rs @@ -28,7 +28,9 @@ use katana_provider::traits::state::{StateFactoryProvider, StateProvider}; use katana_provider::traits::transaction::{ ReceiptProvider, TransactionProvider, TransactionsProviderExt, }; -use starknet::core::types::{BlockTag, EmittedEvent, EventsPage, FeeEstimate}; +use starknet::core::types::{ + BlockTag, EmittedEvent, EventsPage, FeeEstimate, SimulatedTransaction, +}; use crate::backend::config::StarknetConfig; use crate::backend::contract::StarknetContract; @@ -215,6 +217,29 @@ impl KatanaSequencer { .map_err(SequencerError::TransactionExecution) } + pub fn simulate_transactions( + &self, + transactions: Vec, + block_id: BlockIdOrTag, + validate: bool, + charge_fee: bool, + ) -> SequencerResult> { + let state = self.state(&block_id)?; + + let block_context = self + .block_execution_context_at(block_id)? 
+ .ok_or_else(|| SequencerError::BlockNotFound(block_id))?; + + katana_executor::blockifier::utils::simulate_transactions( + transactions, + &block_context, + state, + validate, + charge_fee, + ) + .map_err(SequencerError::TransactionExecution) + } + pub fn block_hash_and_number(&self) -> SequencerResult<(BlockHash, BlockNumber)> { let provider = self.backend.blockchain.provider(); let hash = BlockHashProvider::latest_hash(provider)?; diff --git a/crates/katana/executor/Cargo.toml b/crates/katana/executor/Cargo.toml index a862e7a71a..50211e7f9c 100644 --- a/crates/katana/executor/Cargo.toml +++ b/crates/katana/executor/Cargo.toml @@ -11,6 +11,7 @@ katana-primitives = { path = "../primitives" } katana-provider = { path = "../storage/provider" } anyhow.workspace = true +cairo-vm.workspace = true convert_case.workspace = true futures.workspace = true parking_lot.workspace = true diff --git a/crates/katana/executor/src/blockifier/utils.rs b/crates/katana/executor/src/blockifier/utils.rs index 5ec42972a5..c52ef32a5b 100644 --- a/crates/katana/executor/src/blockifier/utils.rs +++ b/crates/katana/executor/src/blockifier/utils.rs @@ -17,20 +17,30 @@ use blockifier::transaction::errors::TransactionExecutionError; use blockifier::transaction::objects::{ DeprecatedAccountTransactionContext, ResourcesMapping, TransactionExecutionInfo, }; +use cairo_vm::vm::runners::builtin_runner::{ + BITWISE_BUILTIN_NAME, EC_OP_BUILTIN_NAME, HASH_BUILTIN_NAME, KECCAK_BUILTIN_NAME, + POSEIDON_BUILTIN_NAME, RANGE_CHECK_BUILTIN_NAME, SEGMENT_ARENA_BUILTIN_NAME, + SIGNATURE_BUILTIN_NAME, +}; use convert_case::{Case, Casing}; use katana_primitives::contract::ContractAddress; use katana_primitives::env::{BlockEnv, CfgEnv}; use katana_primitives::receipt::{Event, MessageToL1}; use katana_primitives::state::{StateUpdates, StateUpdatesWithDeclaredClasses}; -use katana_primitives::transaction::ExecutableTxWithHash; +use katana_primitives::transaction::{ExecutableTx, ExecutableTxWithHash}; use katana_primitives::FieldElement; use katana_provider::traits::contract::ContractClassProvider; use katana_provider::traits::state::StateProvider; -use starknet::core::types::{FeeEstimate, PriceUnit}; +use starknet::core::types::{ + DeclareTransactionTrace, DeployAccountTransactionTrace, ExecuteInvocation, FeeEstimate, + FunctionInvocation, InvokeTransactionTrace, L1HandlerTransactionTrace, PriceUnit, + RevertedInvocation, SimulatedTransaction, TransactionTrace, +}; use starknet::core::utils::parse_cairo_short_string; use starknet::macros::felt; use starknet_api::block::{BlockNumber, BlockTimestamp}; use starknet_api::core::EntryPointSelector; +use starknet_api::hash::StarkFelt; use starknet_api::transaction::Calldata; use tracing::trace; @@ -87,6 +97,84 @@ pub fn estimate_fee( .collect::, _>>() } +/// Simulate a transaction's execution on the state +pub fn simulate_transactions( + transactions: Vec, + block_context: &BlockContext, + state: Box, + validate: bool, + charge_fee: bool, +) -> Result, TransactionExecutionError> { + let state = CachedStateWrapper::new(StateRefDb(state)); + let results = TransactionExecutor::new( + &state, + block_context, + charge_fee, + validate, + transactions.clone().into_iter(), + ) + .with_error_log() + .execute(); + + results + .into_iter() + .zip(transactions) + .map(|(result, tx)| { + let result = result?; + let function_invocation = result + .execute_call_info + .as_ref() + .map(function_invocation_from_call_info) + .ok_or(TransactionExecutionError::ExecutionError( + 
EntryPointExecutionError::ExecutionFailed { error_data: Default::default() }, + )); + + let validate_invocation = + result.validate_call_info.as_ref().map(function_invocation_from_call_info); + + let fee_transfer_invocation = + result.fee_transfer_call_info.as_ref().map(function_invocation_from_call_info); + + let transaction_trace = match &tx.transaction { + ExecutableTx::Declare(_) => TransactionTrace::Declare(DeclareTransactionTrace { + validate_invocation, + fee_transfer_invocation, + state_diff: None, + }), + ExecutableTx::DeployAccount(_) => { + TransactionTrace::DeployAccount(DeployAccountTransactionTrace { + constructor_invocation: function_invocation?, + validate_invocation, + fee_transfer_invocation, + state_diff: None, + }) + } + ExecutableTx::Invoke(_) => TransactionTrace::Invoke(InvokeTransactionTrace { + validate_invocation, + execute_invocation: if let Some(revert_reason) = result.revert_error.as_ref() { + ExecuteInvocation::Reverted(RevertedInvocation { + revert_reason: revert_reason.clone(), + }) + } else { + ExecuteInvocation::Success(function_invocation?) + }, + fee_transfer_invocation, + state_diff: None, + }), + ExecutableTx::L1Handler(_) => { + TransactionTrace::L1Handler(L1HandlerTransactionTrace { + function_invocation: function_invocation?, + state_diff: None, + }) + } + }; + let fee_estimation = calculate_execution_fee(block_context, &result)?; + + Ok(SimulatedTransaction { transaction_trace, fee_estimation }) + }) + .collect::, _>>() +} + /// Perform a raw entrypoint call of a contract. pub fn raw_call( request: EntryPointCall, @@ -397,3 +485,78 @@ pub(super) fn l2_to_l1_messages_from_exec_info( messages } + +fn function_invocation_from_call_info(info: &CallInfo) -> FunctionInvocation { + let entry_point_type = match info.call.entry_point_type { + starknet_api::deprecated_contract_class::EntryPointType::Constructor => { + starknet::core::types::EntryPointType::Constructor + } + starknet_api::deprecated_contract_class::EntryPointType::External => { + starknet::core::types::EntryPointType::External + } + starknet_api::deprecated_contract_class::EntryPointType::L1Handler => { + starknet::core::types::EntryPointType::L1Handler + } + }; + let call_type = match info.call.call_type { + blockifier::execution::entry_point::CallType::Call => starknet::core::types::CallType::Call, + blockifier::execution::entry_point::CallType::Delegate => { + starknet::core::types::CallType::Delegate + } + }; + + let calls = info.inner_calls.iter().map(function_invocation_from_call_info).collect(); + let events = info + .execution + .events + .iter() + .map(|e| starknet::core::types::OrderedEvent { + order: e.order as u64, + data: e.event.data.0.iter().map(|d| (*d).into()).collect(), + keys: e.event.keys.iter().map(|k| k.0.into()).collect(), + }) + .collect(); + let messages = info + .execution + .l2_to_l1_messages + .iter() + .map(|m| starknet::core::types::OrderedMessage { + order: m.order as u64, + to_address: (Into::::into(m.message.to_address)).into(), + from_address: (*info.call.storage_address.0.key()).into(), + payload: m.message.payload.0.iter().map(|p| (*p).into()).collect(), + }) + .collect(); + + let vm_resources = info.vm_resources.filter_unused_builtins(); + let get_vm_resource = + |name: &str| vm_resources.builtin_instance_counter.get(name).map(|r| *r as u64); + let execution_resources = starknet::core::types::ExecutionResources { + steps: vm_resources.n_steps as u64, + memory_holes: Some(vm_resources.n_memory_holes as u64), + range_check_builtin_applications: 
get_vm_resource(RANGE_CHECK_BUILTIN_NAME), + pedersen_builtin_applications: get_vm_resource(HASH_BUILTIN_NAME), + poseidon_builtin_applications: get_vm_resource(POSEIDON_BUILTIN_NAME), + ec_op_builtin_applications: get_vm_resource(EC_OP_BUILTIN_NAME), + ecdsa_builtin_applications: get_vm_resource(SIGNATURE_BUILTIN_NAME), + bitwise_builtin_applications: get_vm_resource(BITWISE_BUILTIN_NAME), + keccak_builtin_applications: get_vm_resource(KECCAK_BUILTIN_NAME), + segment_arena_builtin: get_vm_resource(SEGMENT_ARENA_BUILTIN_NAME), + }; + + FunctionInvocation { + contract_address: (*info.call.storage_address.0.key()).into(), + entry_point_selector: info.call.entry_point_selector.0.into(), + calldata: info.call.calldata.0.iter().map(|f| (*f).into()).collect(), + caller_address: (*info.call.caller_address.0.key()).into(), + // See + class_hash: info.call.class_hash.expect("Class hash mut be set after execution").0.into(), + entry_point_type, + call_type, + result: info.execution.retdata.0.iter().map(|f| (*f).into()).collect(), + calls, + events, + messages, + execution_resources, + } +} diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/forge-std b/crates/katana/primitives/contracts/messaging/solidity/lib/forge-std new file mode 160000 index 0000000000..ae570fec08 --- /dev/null +++ b/crates/katana/primitives/contracts/messaging/solidity/lib/forge-std @@ -0,0 +1 @@ +Subproject commit ae570fec082bfe1c1f45b0acca4a2b4f84d345ce diff --git a/crates/katana/rpc/rpc-api/src/starknet.rs b/crates/katana/rpc/rpc-api/src/starknet.rs index 7faa491d8e..9158fa1724 100644 --- a/crates/katana/rpc/rpc-api/src/starknet.rs +++ b/crates/katana/rpc/rpc-api/src/starknet.rs @@ -15,9 +15,10 @@ use katana_rpc_types::transaction::{ DeclareTxResult, DeployAccountTxResult, InvokeTxResult, Tx, }; use katana_rpc_types::{ - ContractClass, FeeEstimate, FeltAsHex, FunctionCall, SimulationFlags, SyncingStatus, + ContractClass, FeeEstimate, FeltAsHex, FunctionCall, SimulationFlag, + SimulationFlagForEstimateFee, SyncingStatus, }; -use starknet::core::types::TransactionStatus; +use starknet::core::types::{SimulatedTransaction, TransactionStatus}; /// The currently supported version of the Starknet JSON-RPC specification. pub const RPC_SPEC_VERSION: &str = "0.6.0"; @@ -122,7 +123,7 @@ pub trait StarknetApi { async fn estimate_fee( &self, request: Vec, - simulation_flags: Vec, + simulation_flags: Vec, block_id: BlockIdOrTag, ) -> RpcResult>; @@ -184,4 +185,13 @@ pub trait StarknetApi { &self, deploy_account_transaction: BroadcastedDeployAccountTx, ) -> RpcResult; + + /// Simulates a list of transactions on the provided block. 
+ #[method(name = "simulateTransactions")] + async fn simulate_transactions( + &self, + block_id: BlockIdOrTag, + transactions: Vec, + simulation_flags: Vec, + ) -> RpcResult>; } diff --git a/crates/katana/rpc/rpc-types/src/lib.rs b/crates/katana/rpc/rpc-types/src/lib.rs index a3371934ba..8258c387e1 100644 --- a/crates/katana/rpc/rpc-types/src/lib.rs +++ b/crates/katana/rpc/rpc-types/src/lib.rs @@ -49,7 +49,9 @@ pub type FeeEstimate = starknet::core::types::FeeEstimate; pub type ContractClass = starknet::core::types::ContractClass; -pub type SimulationFlags = starknet::core::types::SimulationFlagForEstimateFee; +pub type SimulationFlagForEstimateFee = starknet::core::types::SimulationFlagForEstimateFee; + +pub type SimulationFlag = starknet::core::types::SimulationFlag; pub type SyncingStatus = starknet::core::types::SyncStatusType; diff --git a/crates/katana/rpc/rpc/src/starknet.rs b/crates/katana/rpc/rpc/src/starknet.rs index a3d76dce36..f9bedb9926 100644 --- a/crates/katana/rpc/rpc/src/starknet.rs +++ b/crates/katana/rpc/rpc/src/starknet.rs @@ -27,10 +27,15 @@ use katana_rpc_types::transaction::{ BroadcastedDeclareTx, BroadcastedDeployAccountTx, BroadcastedInvokeTx, BroadcastedTx, DeclareTxResult, DeployAccountTxResult, InvokeTxResult, Tx, }; -use katana_rpc_types::{ContractClass, FeeEstimate, FeltAsHex, FunctionCall, SimulationFlags}; +use katana_rpc_types::{ + ContractClass, FeeEstimate, FeltAsHex, FunctionCall, SimulationFlag, + SimulationFlagForEstimateFee, +}; use katana_rpc_types_builder::ReceiptBuilder; use katana_tasks::{BlockingTaskPool, TokioTaskSpawner}; -use starknet::core::types::{BlockTag, TransactionExecutionStatus, TransactionStatus}; +use starknet::core::types::{ + BlockTag, SimulatedTransaction, TransactionExecutionStatus, TransactionStatus, +}; #[derive(Clone)] pub struct StarknetApi { @@ -470,7 +475,7 @@ impl StarknetApiServer for StarknetApi { async fn estimate_fee( &self, request: Vec, - simulation_flags: Vec, + simulation_flags: Vec, block_id: BlockIdOrTag, ) -> RpcResult> { self.on_cpu_blocking_task(move |this| { @@ -508,8 +513,9 @@ impl StarknetApiServer for StarknetApi { }) .collect::, _>>()?; - let skip_validate = - simulation_flags.iter().any(|flag| flag == &SimulationFlags::SkipValidate); + let skip_validate = simulation_flags + .iter() + .any(|flag| flag == &SimulationFlagForEstimateFee::SkipValidate); let res = this .inner @@ -663,4 +669,58 @@ impl StarknetApiServer for StarknetApi { }) .await } + + async fn simulate_transactions( + &self, + block_id: BlockIdOrTag, + transactions: Vec, + simulation_flags: Vec, + ) -> RpcResult> { + self.on_cpu_blocking_task(move |this| { + let charge_fee = !simulation_flags.contains(&SimulationFlag::SkipFeeCharge); + let validate = !simulation_flags.contains(&SimulationFlag::SkipValidate); + let chain_id = this.inner.sequencer.chain_id(); + let executables = transactions + .into_iter() + .map(|tx| { + let tx = match tx { + BroadcastedTx::Invoke(tx) => { + let is_query = tx.is_query(); + ExecutableTxWithHash::new_query( + ExecutableTx::Invoke(tx.into_tx_with_chain_id(chain_id)), + is_query, + ) + } + BroadcastedTx::Declare(tx) => { + let is_query = tx.is_query(); + ExecutableTxWithHash::new_query( + ExecutableTx::Declare( + tx.try_into_tx_with_chain_id(chain_id) + .map_err(|_| StarknetApiError::InvalidContractClass)?, + ), + is_query, + ) + } + BroadcastedTx::DeployAccount(tx) => { + let is_query = tx.is_query(); + ExecutableTxWithHash::new_query( + ExecutableTx::DeployAccount(tx.into_tx_with_chain_id(chain_id)), + 
is_query, + ) + } + }; + Result::::Ok(tx) + }) + .collect::, _>>()?; + + let res = this + .inner + .sequencer + .simulate_transactions(executables, block_id, validate, charge_fee) + .map_err(StarknetApiError::from)?; + + Ok(res) + }) + .await + } } diff --git a/crates/katana/runner/runner-macro/Cargo.toml b/crates/katana/runner/runner-macro/Cargo.toml index b6e3afc45f..a2617ff5c9 100644 --- a/crates/katana/runner/runner-macro/Cargo.toml +++ b/crates/katana/runner/runner-macro/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "runner-macro" -version = "0.6.0-alpha.5" +version = "0.6.0-alpha.7" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/crates/sozo/ops/Cargo.toml b/crates/sozo/ops/Cargo.toml new file mode 100644 index 0000000000..2472bb87fe --- /dev/null +++ b/crates/sozo/ops/Cargo.toml @@ -0,0 +1,53 @@ +[package] +edition.workspace = true +name = "sozo-ops" +version.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +cairo-lang-compiler.workspace = true +cairo-lang-defs.workspace = true +cairo-lang-filesystem.workspace = true +cairo-lang-plugins.workspace = true +cairo-lang-project.workspace = true +cairo-lang-sierra-to-casm.workspace = true +cairo-lang-sierra.workspace = true +cairo-lang-starknet.workspace = true +cairo-lang-test-plugin.workspace = true +cairo-lang-test-runner.workspace = true +cairo-lang-utils.workspace = true +camino.workspace = true +clap-verbosity-flag = "2.0.1" +clap.workspace = true +clap_complete.workspace = true +console.workspace = true +dojo-bindgen.workspace = true +dojo-lang.workspace = true +dojo-types.workspace = true +dojo-world = { workspace = true, features = [ "contracts", "metadata", "migration" ] } +futures.workspace = true +notify = "6.0.1" +notify-debouncer-mini = "0.3.0" +scarb-ui.workspace = true +scarb.workspace = true +semver.workspace = true +serde.workspace = true +serde_json.workspace = true +smol_str.workspace = true +starknet-crypto.workspace = true +starknet.workspace = true +thiserror.workspace = true +tokio.workspace = true +tracing-log = "0.1.3" +tracing.workspace = true +url.workspace = true +cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.2.2" } + +[dev-dependencies] +assert_fs = "1.0.10" +dojo-test-utils = { workspace = true, features = [ "build-examples" ] } +katana-runner.workspace = true +snapbox = "0.4.6" diff --git a/crates/sozo/ops/src/auth.rs b/crates/sozo/ops/src/auth.rs new file mode 100644 index 0000000000..759a799893 --- /dev/null +++ b/crates/sozo/ops/src/auth.rs @@ -0,0 +1,173 @@ +use std::str::FromStr; + +use anyhow::{Context, Result}; +use dojo_world::contracts::model::ModelError; +use dojo_world::contracts::world::WorldContract; +use dojo_world::contracts::{cairo_utils, WorldContractReader}; +use dojo_world::migration::TxConfig; +use starknet::accounts::ConnectedAccount; +use starknet::core::utils::parse_cairo_short_string; +use starknet::providers::Provider; +use starknet_crypto::FieldElement; + +use super::get_contract_address; +use crate::utils::handle_transaction_result; + +#[derive(Debug, Clone, PartialEq)] +pub enum ResourceType { + Contract(String), + Model(FieldElement), +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ModelContract { + pub model: FieldElement, + pub contract: String, +} + +impl FromStr for ModelContract { + type Err = anyhow::Error; + + fn from_str(s: 
&str) -> Result { + let parts: Vec<&str> = s.split(',').collect(); + + let (model, contract) = match parts.as_slice() { + [model, contract] => (model, contract), + _ => anyhow::bail!( + "Model and contract address are expected to be comma separated: `sozo auth grant \ + writer model_name,0x1234`" + ), + }; + + let model = cairo_utils::str_to_felt(model) + .map_err(|_| anyhow::anyhow!("Invalid model name: {}", model))?; + + Ok(ModelContract { model, contract: contract.to_string() }) + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct OwnerResource { + pub resource: ResourceType, + pub owner: FieldElement, +} + +impl FromStr for OwnerResource { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let parts: Vec<&str> = s.split(',').collect(); + + let (resource_part, owner_part) = match parts.as_slice() { + [resource, owner] => (*resource, *owner), + _ => anyhow::bail!( + "Owner and resource are expected to be comma separated: `sozo auth grant owner \ + resource_type:resource_name,0x1234`" + ), + }; + + let owner = FieldElement::from_hex_be(owner_part) + .map_err(|_| anyhow::anyhow!("Invalid owner address: {}", owner_part))?; + + let resource_parts = resource_part.split_once(':'); + let resource = match resource_parts { + Some(("contract", name)) => ResourceType::Contract(name.to_string()), + Some(("model", name)) => { + let model = cairo_utils::str_to_felt(name) + .map_err(|_| anyhow::anyhow!("Invalid model name: {}", name))?; + ResourceType::Model(model) + } + _ => anyhow::bail!( + "Resource is expected to be in the format `resource_type:resource_name`: `sozo \ + auth grant owner resource_type:resource_name,0x1234`" + ), + }; + + Ok(OwnerResource { owner, resource }) + } +} + +pub async fn grant_writer( + world: &WorldContract, + models_contracts: Vec, + world_reader: WorldContractReader
<P>
, + transaction: TxConfig, +) -> Result<()> +where + A: ConnectedAccount + Sync + Send + 'static, + P: Provider + Sync + Send, +{ + let mut calls = Vec::new(); + + for mc in models_contracts { + let model_name = parse_cairo_short_string(&mc.model)?; + match world_reader.model_reader(&model_name).await { + Ok(_) => { + let contract = get_contract_address(world, mc.contract).await?; + calls.push(world.grant_writer_getcall(&mc.model, &contract.into())); + } + + Err(ModelError::ModelNotFound) => { + println!("Unknown model '{}' => IGNORED", model_name); + } + + Err(err) => { + return Err(err.into()); + } + } + } + + if !calls.is_empty() { + let res = world + .account + .execute(calls) + .send() + .await + .with_context(|| "Failed to send transaction")?; + + handle_transaction_result( + &world.account.provider(), + res, + transaction.wait, + transaction.receipt, + ) + .await?; + } + + Ok(()) +} + +pub async fn grant_owner( + world: WorldContract, + owners_resources: Vec, + transaction: TxConfig, +) -> Result<()> +where + A: ConnectedAccount + Sync + Send + 'static, +{ + let mut calls = Vec::new(); + + for or in owners_resources { + let resource = match &or.resource { + ResourceType::Model(name) => *name, + ResourceType::Contract(name_or_address) => { + get_contract_address(&world, name_or_address.clone()).await? + } + }; + + calls.push(world.grant_owner_getcall(&or.owner.into(), &resource)); + } + + let res = + world.account.execute(calls).send().await.with_context(|| "Failed to send transaction")?; + + handle_transaction_result( + &world.account.provider(), + res, + transaction.wait, + transaction.receipt, + ) + .await?; + + Ok(()) +} diff --git a/crates/sozo/ops/src/events.rs b/crates/sozo/ops/src/events.rs new file mode 100644 index 0000000000..dab6401cc1 --- /dev/null +++ b/crates/sozo/ops/src/events.rs @@ -0,0 +1,597 @@ +use std::collections::{HashMap, VecDeque}; +use std::fs; + +use anyhow::{anyhow, Context, Result}; +use cainome::parser::tokens::{CompositeInner, CompositeInnerKind, CoreBasic, Token}; +use cainome::parser::AbiParser; +use camino::Utf8PathBuf; +use dojo_lang::compiler::{DEPLOYMENTS_DIR, MANIFESTS_DIR}; +use dojo_world::manifest::{DeploymentManifest, ManifestMethods}; +use starknet::core::types::{BlockId, EventFilter, FieldElement}; +use starknet::core::utils::{parse_cairo_short_string, starknet_keccak}; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::{JsonRpcClient, Provider}; + +pub fn get_event_filter( + from_block: Option, + to_block: Option, + events: Option>, + world_address: Option, +) -> EventFilter { + let from_block = from_block.map(BlockId::Number); + let to_block = to_block.map(BlockId::Number); + // Currently dojo doesn't use custom keys for events. In future if custom keys are used this + // needs to be updated for granular queries. 
+ let keys = + events.map(|e| vec![e.iter().map(|event| starknet_keccak(event.as_bytes())).collect()]); + + EventFilter { from_block, to_block, address: world_address, keys } +} + +pub async fn parse( + chunk_size: u64, + provider: JsonRpcClient, + continuation_token: Option, + event_filter: EventFilter, + json: bool, + manifest_dir: &Utf8PathBuf, +) -> Result<()> { + let chain_id = provider.chain_id().await?; + let chain_id = + parse_cairo_short_string(&chain_id).with_context(|| "Cannot parse chain_id as string")?; + + let events_map = if !json { + let deployed_manifest = manifest_dir + .join(MANIFESTS_DIR) + .join(DEPLOYMENTS_DIR) + .join(chain_id) + .with_extension("toml"); + + if !deployed_manifest.exists() { + return Err(anyhow!("Run scarb migrate before running this command")); + } + + Some(extract_events( + &DeploymentManifest::load_from_path(&deployed_manifest)?, + manifest_dir, + )?) + } else { + None + }; + + let res = provider.get_events(event_filter, continuation_token, chunk_size).await?; + + if let Some(events_map) = events_map { + parse_and_print_events(res, events_map)?; + } + + Ok(()) +} + +fn is_event(token: &Token) -> bool { + match token { + Token::Composite(composite) => composite.is_event, + _ => false, + } +} + +fn extract_events( + manifest: &DeploymentManifest, + manifest_dir: &Utf8PathBuf, +) -> Result>> { + fn process_abi( + events: &mut HashMap>, + full_abi_path: &Utf8PathBuf, + ) -> Result<()> { + let abi_str = fs::read_to_string(full_abi_path)?; + + match AbiParser::tokens_from_abi_string(&abi_str, &HashMap::new()) { + Ok(tokens) => { + for token in tokens.structs { + if is_event(&token) { + let event_name = starknet_keccak(token.type_name().as_bytes()); + let vec = events.entry(event_name.to_string()).or_default(); + vec.push(token.clone()); + } + } + } + Err(e) => return Err(anyhow!("Error parsing ABI: {}", e)), + } + + Ok(()) + } + + let mut events_map = HashMap::new(); + + for contract in &manifest.contracts { + if let Some(abi_path) = contract.inner.abi() { + let full_abi_path = manifest_dir.join(abi_path); + process_abi(&mut events_map, &full_abi_path)?; + } + } + + for model in &manifest.contracts { + if let Some(abi_path) = model.inner.abi() { + let full_abi_path = manifest_dir.join(abi_path); + process_abi(&mut events_map, &full_abi_path)?; + } + } + + // Read the world and base ABI from scarb artifacts as the + // manifest does not include them. 
+ let world_abi_path = manifest_dir.join("target/dev/dojo::world::world.json"); + process_abi(&mut events_map, &world_abi_path)?; + + let base_abi_path = manifest_dir.join("target/dev/dojo::base::base.json"); + process_abi(&mut events_map, &base_abi_path)?; + + Ok(events_map) +} + +fn parse_and_print_events( + res: starknet::core::types::EventsPage, + events_map: HashMap>, +) -> Result<()> { + println!("Continuation token: {:?}", res.continuation_token); + println!("----------------------------------------------"); + for event in res.events { + let parsed_event = parse_event(event.clone(), &events_map) + .map_err(|e| anyhow!("Error parsing event: {}", e))?; + + match parsed_event { + Some(e) => println!("{e}"), + None => return Err(anyhow!("No matching event found for {:?}", event)), + } + } + Ok(()) +} + +fn parse_core_basic( + cb: &CoreBasic, + value: &FieldElement, + include_felt_string: bool, +) -> Result { + match cb.type_name().as_str() { + "felt252" => { + let hex = format!("{:#x}", value); + match parse_cairo_short_string(value) { + Ok(parsed) if !parsed.is_empty() && (include_felt_string && parsed.is_ascii()) => { + Ok(format!("{} \"{}\"", hex, parsed)) + } + _ => Ok(hex.to_string()), + } + } + "bool" => { + if *value == FieldElement::ZERO { + Ok("false".to_string()) + } else { + Ok("true".to_string()) + } + } + "ClassHash" | "ContractAddress" => Ok(format!("{:#x}", value)), + "u8" | "u16" | "u32" | "u64" | "u128" | "usize" | "i8" | "i16" | "i32" | "i64" | "i128" => { + Ok(value.to_string()) + } + _ => Err(anyhow!("Unsupported CoreBasic type: {}", cb.type_name())), + } +} + +fn parse_event( + event: starknet::core::types::EmittedEvent, + events_map: &HashMap>, +) -> Result> { + let mut data = VecDeque::from(event.data.clone()); + let mut keys = VecDeque::from(event.keys.clone()); + let event_hash = keys.pop_front().ok_or(anyhow!("Event hash missing"))?; + + let events = events_map + .get(&event_hash.to_string()) + .ok_or(anyhow!("Events for hash not found: {:#x}", event_hash))?; + + for e in events { + if let Token::Composite(composite) = e { + let processed_inners = process_inners(&composite.inners, &mut data, &mut keys)?; + let ret = format!("Event name: {}\n{}", e.type_path(), processed_inners); + return Ok(Some(ret)); + } + } + + Ok(None) +} + +fn process_inners( + inners: &[CompositeInner], + data: &mut VecDeque, + keys: &mut VecDeque, +) -> Result { + let mut ret = String::new(); + + for inner in inners { + let value = match inner.kind { + CompositeInnerKind::Data => data.pop_front().ok_or(anyhow!("Missing data value")), + CompositeInnerKind::Key => keys.pop_front().ok_or(anyhow!("Missing key value")), + _ => Err(anyhow!("Unsupported inner kind encountered")), + }?; + + let formatted_value = match &inner.token { + Token::CoreBasic(ref cb) => parse_core_basic(cb, &value, true)?, + Token::Array(ref array) => { + let length = value + .to_string() + .parse::() + .map_err(|_| anyhow!("Error parsing length to usize"))?; + + let cb = if let Token::CoreBasic(ref cb) = *array.inner { + cb + } else { + return Err(anyhow!("Inner token of array is not CoreBasic")); + }; + + let mut elements = Vec::new(); + for _ in 0..length { + if let Some(element_value) = data.pop_front() { + let element_str = parse_core_basic(cb, &element_value, false)?; + elements.push(element_str); + } else { + return Err(anyhow!("Missing array element value")); + } + } + + format!("[{}]", elements.join(", ")) + } + _ => return Err(anyhow!("Unsupported token type encountered")), + }; + ret.push_str(&format!("{}: 
{}\n", inner.name, formatted_value)); + } + + Ok(ret) +} + +#[cfg(test)] +mod tests { + use camino::Utf8Path; + use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; + use dojo_world::manifest::BaseManifest; + + #[test] + fn extract_events_work_as_expected() { + let manifest_dir = Utf8Path::new("../../../examples/spawn-and-move").to_path_buf(); + let manifest = + BaseManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(BASE_DIR)) + .unwrap() + .into(); + let result = extract_events(&manifest, &manifest_dir).unwrap(); + + // we are just collecting all events from manifest file so just verifying count should work + assert_eq!(result.len(), 12); + } + + use cainome::parser::tokens::{Array, Composite, CompositeInner, CompositeType}; + use starknet::core::types::EmittedEvent; + + use super::*; + + #[test] + fn test_core_basic() { + let composite = Composite { + type_path: "dojo::world::world::TestEvent".to_string(), + inners: vec![ + CompositeInner { + index: 0, + name: "felt252".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { type_path: "core::felt252".to_string() }), + }, + CompositeInner { + index: 1, + name: "bool".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { type_path: "core::bool".to_string() }), + }, + CompositeInner { + index: 2, + name: "u8".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { + type_path: "core::integer::u8".to_string(), + }), + }, + CompositeInner { + index: 3, + name: "u16".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { + type_path: "core::integer::u16".to_string(), + }), + }, + CompositeInner { + index: 4, + name: "u32".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { + type_path: "core::integer::u32".to_string(), + }), + }, + CompositeInner { + index: 5, + name: "u64".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { + type_path: "core::integer::u64".to_string(), + }), + }, + CompositeInner { + index: 6, + name: "u128".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { + type_path: "core::integer::u128".to_string(), + }), + }, + CompositeInner { + index: 7, + name: "usize".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { + type_path: "core::integer::usize".to_string(), + }), + }, + CompositeInner { + index: 8, + name: "class_hash".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { type_path: "core::ClassHash".to_string() }), + }, + CompositeInner { + index: 9, + name: "contract_address".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { + type_path: "core::ContractAddress".to_string(), + }), + }, + ], + generic_args: vec![], + r#type: CompositeType::Struct, + is_event: true, + alias: None, + }; + let tokenized_composite = Token::Composite(composite); + + let mut events_map = HashMap::new(); + events_map + .insert(starknet_keccak("TestEvent".as_bytes()).to_string(), vec![tokenized_composite]); + + let event = EmittedEvent { + keys: vec![starknet_keccak("TestEvent".as_bytes())], + data: vec![ + FieldElement::from_hex_be("0x5465737431").unwrap(), + FieldElement::from(1u8), // bool true + FieldElement::from(1u8), + FieldElement::from(2u16), + FieldElement::from(3u32), + FieldElement::from(4u64), + FieldElement::from(5u128), + FieldElement::from(6usize), + FieldElement::from_hex_be("0x54657374").unwrap(), + 
FieldElement::from_hex_be("0x54657374").unwrap(), + ], + from_address: FieldElement::from_hex_be("0x123").unwrap(), + block_hash: FieldElement::from_hex_be("0x456").ok(), + block_number: Some(1), + transaction_hash: FieldElement::from_hex_be("0x789").unwrap(), + }; + + let expected_output = "Event name: dojo::world::world::TestEvent\nfelt252: 0x5465737431 \ + \"Test1\"\nbool: true\nu8: 1\nu16: 2\nu32: 3\nu64: 4\nu128: \ + 5\nusize: 6\nclass_hash: 0x54657374\ncontract_address: 0x54657374\n" + .to_string(); + + let actual_output_option = parse_event(event, &events_map).expect("Failed to parse event"); + + match actual_output_option { + Some(actual_output) => assert_eq!(actual_output, expected_output), + None => panic!("Expected event was not found."), + } + } + + #[test] + fn test_array() { + let composite = Composite { + type_path: "dojo::world::world::StoreDelRecord".to_string(), + inners: vec![ + CompositeInner { + index: 0, + name: "table".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { type_path: "core::felt252".to_string() }), + }, + CompositeInner { + index: 1, + name: "keys".to_string(), + kind: CompositeInnerKind::Data, + token: Token::Array(Array { + type_path: "core::array::Span::".to_string(), + inner: Box::new(Token::CoreBasic(CoreBasic { + type_path: "core::felt252".to_string(), + })), + }), + }, + ], + generic_args: vec![], + r#type: CompositeType::Struct, + is_event: true, + alias: None, + }; + let tokenized_composite = Token::Composite(composite); + + let mut events_map = HashMap::new(); + events_map.insert( + starknet_keccak("StoreDelRecord".as_bytes()).to_string(), + vec![tokenized_composite], + ); + + let event = EmittedEvent { + keys: vec![starknet_keccak("StoreDelRecord".as_bytes())], + data: vec![ + FieldElement::from_hex_be("0x54657374").unwrap(), + FieldElement::from(3u128), + FieldElement::from_hex_be("0x5465737431").unwrap(), + FieldElement::from_hex_be("0x5465737432").unwrap(), + FieldElement::from_hex_be("0x5465737433").unwrap(), + ], + from_address: FieldElement::from_hex_be("0x123").unwrap(), + block_hash: FieldElement::from_hex_be("0x456").ok(), + block_number: Some(1), + transaction_hash: FieldElement::from_hex_be("0x789").unwrap(), + }; + + let expected_output = "Event name: dojo::world::world::StoreDelRecord\ntable: 0x54657374 \ + \"Test\"\nkeys: [0x5465737431, 0x5465737432, 0x5465737433]\n" + .to_string(); + + let actual_output_option = parse_event(event, &events_map).expect("Failed to parse event"); + + match actual_output_option { + Some(actual_output) => assert_eq!(actual_output, expected_output), + None => panic!("Expected event was not found."), + } + } + + #[test] + fn test_custom_event() { + let composite = Composite { + type_path: "dojo::world::world::CustomEvent".to_string(), + inners: vec![ + CompositeInner { + index: 0, + name: "key_1".to_string(), + kind: CompositeInnerKind::Key, + token: Token::CoreBasic(CoreBasic { + type_path: "core::integer::u32".to_string(), + }), + }, + CompositeInner { + index: 1, + name: "key_2".to_string(), + kind: CompositeInnerKind::Key, + token: Token::CoreBasic(CoreBasic { type_path: "core::felt252".to_string() }), + }, + CompositeInner { + index: 2, + name: "data_1".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { + type_path: "core::integer::u8".to_string(), + }), + }, + CompositeInner { + index: 3, + name: "data_2".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { + type_path: 
"core::integer::u8".to_string(), + }), + }, + ], + generic_args: vec![], + r#type: CompositeType::Struct, + is_event: true, + alias: None, + }; + let tokenized_composite = Token::Composite(composite); + + let mut events_map = HashMap::new(); + events_map.insert( + starknet_keccak("CustomEvent".as_bytes()).to_string(), + vec![tokenized_composite], + ); + + let event = EmittedEvent { + keys: vec![ + starknet_keccak("CustomEvent".as_bytes()), + FieldElement::from(3u128), + FieldElement::from_hex_be("0x5465737431").unwrap(), + ], + data: vec![FieldElement::from(1u128), FieldElement::from(2u128)], + from_address: FieldElement::from_hex_be("0x123").unwrap(), + block_hash: FieldElement::from_hex_be("0x456").ok(), + block_number: Some(1), + transaction_hash: FieldElement::from_hex_be("0x789").unwrap(), + }; + + let expected_output = "Event name: dojo::world::world::CustomEvent\nkey_1: 3\nkey_2: \ + 0x5465737431 \"Test1\"\ndata_1: 1\ndata_2: 2\n" + .to_string(); + + let actual_output_option = parse_event(event, &events_map).expect("Failed to parse event"); + + match actual_output_option { + Some(actual_output) => assert_eq!(actual_output, expected_output), + None => panic!("Expected event was not found."), + } + } + + #[test] + fn test_zero_felt() { + let composite = Composite { + type_path: "dojo::world::world::StoreDelRecord".to_string(), + inners: vec![ + CompositeInner { + index: 0, + name: "table".to_string(), + kind: CompositeInnerKind::Data, + token: Token::CoreBasic(CoreBasic { type_path: "core::felt252".to_string() }), + }, + CompositeInner { + index: 1, + name: "keys".to_string(), + kind: CompositeInnerKind::Data, + token: Token::Array(Array { + type_path: "core::array::Span::".to_string(), + inner: Box::new(Token::CoreBasic(CoreBasic { + type_path: "core::felt252".to_string(), + })), + }), + }, + ], + generic_args: vec![], + r#type: CompositeType::Struct, + is_event: true, + alias: None, + }; + let tokenized_composite = Token::Composite(composite); + + let mut events_map = HashMap::new(); + events_map.insert( + starknet_keccak("StoreDelRecord".as_bytes()).to_string(), + vec![tokenized_composite], + ); + + let event = EmittedEvent { + keys: vec![starknet_keccak("StoreDelRecord".as_bytes())], + data: vec![ + FieldElement::from_hex_be("0x0").unwrap(), + FieldElement::from(3u128), + FieldElement::from_hex_be("0x0").unwrap(), + FieldElement::from_hex_be("0x1").unwrap(), + FieldElement::from_hex_be("0x2").unwrap(), + ], + from_address: FieldElement::from_hex_be("0x123").unwrap(), + block_hash: FieldElement::from_hex_be("0x456").ok(), + block_number: Some(1), + transaction_hash: FieldElement::from_hex_be("0x789").unwrap(), + }; + + let expected_output = "Event name: dojo::world::world::StoreDelRecord\ntable: 0x0\nkeys: \ + [0x0, 0x1, 0x2]\n" + .to_string(); + + let actual_output_option = parse_event(event, &events_map).expect("Failed to parse event"); + + match actual_output_option { + Some(actual_output) => assert_eq!(actual_output, expected_output), + None => panic!("Expected event was not found."), + } + } +} diff --git a/crates/sozo/ops/src/execute.rs b/crates/sozo/ops/src/execute.rs new file mode 100644 index 0000000000..a496a2910c --- /dev/null +++ b/crates/sozo/ops/src/execute.rs @@ -0,0 +1,35 @@ +use anyhow::{Context, Result}; +use dojo_world::contracts::world::WorldContract; +use dojo_world::migration::TxConfig; +use starknet::accounts::{Call, ConnectedAccount}; +use starknet::core::types::FieldElement; +use starknet::core::utils::get_selector_from_name; + +use 
super::get_contract_address; +use crate::utils::handle_transaction_result; + +pub async fn execute( + contract: String, + entrypoint: String, + calldata: Vec, + world: WorldContract, + transaction: TxConfig, +) -> Result<()> +where + A: ConnectedAccount + Sync + Send + 'static, +{ + let contract_address = get_contract_address(&world, contract).await?; + let res = world + .account + .execute(vec![Call { + calldata, + to: contract_address, + selector: get_selector_from_name(&entrypoint)?, + }]) + .send() + .await + .with_context(|| "Failed to send transaction")?; + + handle_transaction_result(&world.account.provider(), res, transaction.wait, transaction.receipt) + .await +} diff --git a/bin/sozo/src/ops/mod.rs b/crates/sozo/ops/src/lib.rs similarity index 98% rename from bin/sozo/src/ops/mod.rs rename to crates/sozo/ops/src/lib.rs index 509e266d27..676e8e86b8 100644 --- a/bin/sozo/src/ops/mod.rs +++ b/crates/sozo/ops/src/lib.rs @@ -10,6 +10,7 @@ pub mod execute; pub mod migration; pub mod model; pub mod register; +pub mod utils; pub async fn get_contract_address( world: &WorldContract, diff --git a/bin/sozo/src/ops/migration/migration_test.rs b/crates/sozo/ops/src/migration/migration_test.rs similarity index 85% rename from bin/sozo/src/ops/migration/migration_test.rs rename to crates/sozo/ops/src/migration/migration_test.rs index 84999032dd..5bff89e52c 100644 --- a/bin/sozo/src/ops/migration/migration_test.rs +++ b/crates/sozo/ops/src/migration/migration_test.rs @@ -5,9 +5,10 @@ use dojo_test_utils::migration::prepare_migration; use dojo_test_utils::sequencer::{ get_default_test_starknet_config, SequencerConfig, StarknetConfig, TestSequencer, }; -use dojo_world::manifest::{BaseManifest, DeployedManifest}; +use dojo_world::manifest::{BaseManifest, DeploymentManifest}; use dojo_world::migration::strategy::prepare_for_migration; use dojo_world::migration::world::WorldDiff; +use dojo_world::migration::TxConfig; use scarb::ops; use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; use starknet::core::chain_id; @@ -17,16 +18,15 @@ use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; use starknet::signers::{LocalWallet, SigningKey}; -use crate::commands::options::transaction::TransactionOptions; -use crate::ops::migration::execute_strategy; +use crate::migration::execute_strategy; #[tokio::test(flavor = "multi_thread")] async fn migrate_with_auto_mine() { - let config = build_test_config("../../examples/spawn-and-move/Scarb.toml").unwrap(); + let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); let ws = ops::read_workspace(config.manifest_path(), &config) .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - let base_dir = "../../examples/spawn-and-move"; + let base_dir = "../../../examples/spawn-and-move"; let target_dir = format!("{}/target/dev", base_dir); let migration = prepare_migration(base_dir.into(), target_dir.into()).unwrap(); @@ -43,11 +43,11 @@ async fn migrate_with_auto_mine() { #[tokio::test(flavor = "multi_thread")] async fn migrate_with_block_time() { - let config = build_test_config("../../examples/spawn-and-move/Scarb.toml").unwrap(); + let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); let ws = ops::read_workspace(config.manifest_path(), &config) .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - let base = "../../examples/spawn-and-move"; + let base = "../../../examples/spawn-and-move"; let target_dir = format!("{}/target/dev", 
base); let migration = prepare_migration(base.into(), target_dir.into()).unwrap(); @@ -66,11 +66,11 @@ async fn migrate_with_block_time() { #[tokio::test(flavor = "multi_thread")] async fn migrate_with_small_fee_multiplier_will_fail() { - let config = build_test_config("../../examples/spawn-and-move/Scarb.toml").unwrap(); + let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); let ws = ops::read_workspace(config.manifest_path(), &config) .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - let base = "../../examples/spawn-and-move"; + let base = "../../../examples/spawn-and-move"; let target_dir = format!("{}/target/dev", base); let migration = prepare_migration(base.into(), target_dir.into()).unwrap(); @@ -95,7 +95,7 @@ async fn migrate_with_small_fee_multiplier_will_fail() { &ws, &migration, &account, - Some(TransactionOptions { fee_estimate_multiplier: Some(0.2f64), wait: false }), + Some(TxConfig { fee_estimate_multiplier: Some(0.2f64), wait: false, receipt: false }), ) .await .is_err() @@ -105,7 +105,7 @@ async fn migrate_with_small_fee_multiplier_will_fail() { #[test] fn migrate_world_without_seed_will_fail() { - let base = "../../examples/spawn-and-move"; + let base = "../../../examples/spawn-and-move"; let target_dir = format!("{}/target/dev", base); let manifest = BaseManifest::load_from_path( &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(BASE_DIR), @@ -119,10 +119,10 @@ fn migrate_world_without_seed_will_fail() { #[ignore] #[tokio::test] async fn migration_from_remote() { - let config = build_test_config("../../examples/spawn-and-move/Scarb.toml").unwrap(); + let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); let ws = ops::read_workspace(config.manifest_path(), &config) .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - let base = "../../examples/spawn-and-move"; + let base = "../../../examples/spawn-and-move"; let target_dir = format!("{}/target/dev", base); let sequencer = @@ -158,7 +158,7 @@ async fn migration_from_remote() { &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(BASE_DIR), ) .unwrap(); - let remote_manifest = DeployedManifest::load_from_remote( + let remote_manifest = DeploymentManifest::load_from_remote( JsonRpcClient::new(HttpTransport::new(sequencer.url())), migration.world_address().unwrap(), ) diff --git a/bin/sozo/src/ops/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs similarity index 50% rename from bin/sozo/src/ops/migration/mod.rs rename to crates/sozo/ops/src/migration/mod.rs index 4149675aae..7c93ac4a6a 100644 --- a/bin/sozo/src/ops/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -1,3 +1,5 @@ +use std::path::Path; + use anyhow::{anyhow, bail, Context, Result}; use camino::Utf8PathBuf; use dojo_lang::compiler::{ABIS_DIR, BASE_DIR, DEPLOYMENTS_DIR, MANIFESTS_DIR, OVERLAYS_DIR}; @@ -5,27 +7,27 @@ use dojo_world::contracts::abi::world::ResourceMetadata; use dojo_world::contracts::cairo_utils; use dojo_world::contracts::world::WorldContract; use dojo_world::manifest::{ - AbstractManifestError, BaseManifest, DeployedManifest, DojoContract, Manifest, ManifestMethods, - OverlayManifest, + AbstractManifestError, BaseManifest, DeploymentManifest, DojoContract, Manifest, + ManifestMethods, OverlayManifest, }; -use dojo_world::metadata::{dojo_metadata_from_workspace, Environment}; +use dojo_world::metadata::dojo_metadata_from_workspace; use dojo_world::migration::contract::ContractMigration; use 
dojo_world::migration::strategy::{generate_salt, prepare_for_migration, MigrationStrategy}; use dojo_world::migration::world::WorldDiff; use dojo_world::migration::{ - Declarable, DeployOutput, Deployable, MigrationError, RegisterOutput, StateDiff, + Declarable, DeployOutput, Deployable, MigrationError, RegisterOutput, StateDiff, TxConfig, }; use dojo_world::utils::TransactionWaiter; use scarb::core::Workspace; use scarb_ui::Ui; use starknet::accounts::{Account, ConnectedAccount, SingleOwnerAccount}; use starknet::core::types::{ - BlockId, BlockTag, FieldElement, InvokeTransactionResult, StarknetError, + BlockId, BlockTag, FieldElement, FunctionCall, InvokeTransactionResult, StarknetError, }; use starknet::core::utils::{ - cairo_short_string_to_felt, get_contract_address, parse_cairo_short_string, + cairo_short_string_to_felt, get_contract_address, get_selector_from_name, }; -use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::{Provider, ProviderError}; use tokio::fs; #[cfg(test)] @@ -33,29 +35,36 @@ use tokio::fs; mod migration_test; mod ui; -use starknet::providers::{JsonRpcClient, Provider, ProviderError}; -use starknet::signers::{LocalWallet, Signer}; +use starknet::signers::Signer; use ui::MigrationUi; use self::ui::{bold_message, italic_message}; -use crate::commands::migrate::MigrateArgs; -use crate::commands::options::account::AccountOptions; -use crate::commands::options::starknet::StarknetOptions; -use crate::commands::options::transaction::TransactionOptions; -use crate::commands::options::world::WorldOptions; -pub async fn execute( +#[derive(Debug, Default, Clone)] +pub struct MigrationOutput { + pub world_address: FieldElement, + pub world_tx_hash: Option, + pub world_block_number: Option, + // Represents if full migration got completeled. + // If false that means migration got partially completed. + pub full: bool, +} + +pub async fn migrate( ws: &Workspace<'_>, - args: MigrateArgs, - env_metadata: Option, -) -> Result<()> { + world_address: Option, + chain_id: String, + account: &SingleOwnerAccount, + name: Option, + dry_run: bool, +) -> Result<()> +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ let ui = ws.config().ui(); - let MigrateArgs { account, starknet, world, name, .. } = args; // Setup account for migration and fetch world address if it exists. - - let (world_address, account, chain_id) = - setup_env(ws, account, starknet, world, name.as_ref(), env_metadata.as_ref()).await?; ui.print(format!("Chain ID: {}\n", &chain_id)); // its path to a file so `parent` should never return `None` @@ -65,59 +74,101 @@ pub async fn execute( let target_dir = target_dir.join(ws.config().profile().as_str()); // Load local and remote World manifests. - let (local_manifest, remote_manifest) = - load_world_manifests(&manifest_dir, &account, world_address, &ui).await?; + load_world_manifests(&manifest_dir, account, world_address, &ui).await.map_err(|e| { + ui.error(e.to_string()); + anyhow!( + "\n Use `sozo clean` to clean your project, or `sozo clean --manifests-abis` to \ + clean manifest and abi files only.\nThen, rebuild your project with `sozo build`.", + ) + })?; // Calculate diff between local and remote World manifests. - ui.print_step(2, "🧰", "Evaluating Worlds diff..."); let diff = WorldDiff::compute(local_manifest.clone(), remote_manifest.clone()); let total_diffs = diff.count_diffs(); ui.print_sub(format!("Total diffs found: {total_diffs}")); if total_diffs == 0 { - ui.print("\n✨ No changes to be made. 
Remote World is already up to date!") - } else { - // Mirate according to the diff. - let world_address = apply_diff( - ws, - &target_dir, - diff, - name, - world_address, - &account, - Some(args.transaction), - ) - .await?; - - update_manifests_and_abis( - ws, - local_manifest, - remote_manifest, - &manifest_dir, - world_address, - &chain_id, - ) - .await?; + ui.print("\n✨ No changes to be made. Remote World is already up to date!"); + return Ok(()); } + let strategy = prepare_migration(&target_dir, diff, name.clone(), world_address, &ui)?; + let world_address = strategy.world_address().expect("world address must exist"); + + if dry_run { + print_strategy(&ui, account.provider(), &strategy).await; + } else { + // Migrate according to the diff. + match apply_diff(ws, account, None, &strategy).await { + Ok(migration_output) => { + update_manifests_and_abis( + ws, + local_manifest, + remote_manifest, + &manifest_dir, + migration_output, + &chain_id, + name.as_ref(), + ) + .await?; + } + Err(e) => { + update_manifests_and_abis( + ws, + local_manifest, + remote_manifest, + &manifest_dir, + MigrationOutput { world_address, ..Default::default() }, + &chain_id, + name.as_ref(), + ) + .await?; + return Err(e)?; + } + } + }; + Ok(()) } async fn update_manifests_and_abis( ws: &Workspace<'_>, local_manifest: BaseManifest, - remote_manifest: Option, + remote_manifest: Option, manifest_dir: &Utf8PathBuf, - world_address: FieldElement, + migration_output: MigrationOutput, chain_id: &str, + salt: Option<&String>, ) -> Result<()> { let ui = ws.config().ui(); ui.print("\n✨ Updating manifests..."); - let mut local_manifest: DeployedManifest = local_manifest.into(); - local_manifest.world.inner.address = Some(world_address); + let deployed_path = manifest_dir + .join(MANIFESTS_DIR) + .join(DEPLOYMENTS_DIR) + .join(chain_id) + .with_extension("toml"); + + let mut local_manifest: DeploymentManifest = local_manifest.into(); + + if deployed_path.exists() { + let previous_manifest = DeploymentManifest::load_from_path(&deployed_path)?; + local_manifest.merge_from_previous(previous_manifest); + }; + + local_manifest.world.inner.address = Some(migration_output.world_address); + if let Some(salt) = salt { + local_manifest.world.inner.seed = Some(salt.to_owned()); + } + + if migration_output.world_tx_hash.is_some() { + local_manifest.world.inner.transaction_hash = migration_output.world_tx_hash; + } + if migration_output.world_block_number.is_some() { + local_manifest.world.inner.block_number = migration_output.world_block_number; + } let base_class_hash = match remote_manifest { Some(manifest) => *manifest.base.inner.class_hash(), @@ -126,27 +177,22 @@ async fn update_manifests_and_abis( local_manifest.contracts.iter_mut().for_each(|c| { let salt = generate_salt(&c.name); - c.inner.address = Some(get_contract_address(salt, base_class_hash, &[], world_address)); + c.inner.address = + Some(get_contract_address(salt, base_class_hash, &[], migration_output.world_address)); }); // copy abi files from `abi/base` to `abi/deployments/{chain_id}` and update abi path in // local_manifest update_manifest_abis(&mut local_manifest, manifest_dir, chain_id).await; - local_manifest.write_to_path( - &manifest_dir - .join(MANIFESTS_DIR) - .join(DEPLOYMENTS_DIR) - .join(chain_id) - .with_extension("toml"), - )?; + local_manifest.write_to_path(&deployed_path)?; ui.print("\n✨ Done."); Ok(()) } async fn update_manifest_abis( - local_manifest: &mut DeployedManifest, + local_manifest: &mut DeploymentManifest, manifest_dir: &Utf8PathBuf, 
chain_id: &str, ) { @@ -182,42 +228,47 @@ async fn update_manifest_abis( } } -#[allow(clippy::too_many_arguments)] -pub(crate) async fn apply_diff( +pub async fn apply_diff( ws: &Workspace<'_>, - target_dir: &Utf8PathBuf, - diff: WorldDiff, - name: Option, - world_address: Option, account: &SingleOwnerAccount, - txn_config: Option, -) -> Result + txn_config: Option, + strategy: &MigrationStrategy, +) -> Result where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, { let ui = ws.config().ui(); - let strategy = prepare_migration(target_dir, diff, name, world_address, &ui)?; println!(" "); - let block_height = execute_strategy(ws, &strategy, account, txn_config) + let migration_output = execute_strategy(ws, strategy, account, txn_config) .await .map_err(|e| anyhow!(e)) .with_context(|| "Problem trying to migrate.")?; - if let Some(block_height) = block_height { - ui.print(format!( - "\n🎉 Successfully migrated World on block #{} at address {}", - block_height, - bold_message(format!( - "{:#x}", - strategy.world_address().expect("world address must exist") - )) - )); + if migration_output.full { + if let Some(block_number) = migration_output.world_block_number { + ui.print(format!( + "\n🎉 Successfully migrated World on block #{} at address {}", + block_number, + bold_message(format!( + "{:#x}", + strategy.world_address().expect("world address must exist") + )) + )); + } else { + ui.print(format!( + "\n🎉 Successfully migrated World at address {}", + bold_message(format!( + "{:#x}", + strategy.world_address().expect("world address must exist") + )) + )); + } } else { ui.print(format!( - "\n🎉 Successfully migrated World at address {}", + "\n🚨 Partially migrated World at address {}", bold_message(format!( "{:#x}", strategy.world_address().expect("world address must exist") @@ -225,52 +276,7 @@ where )); } - strategy.world_address() -} - -pub(crate) async fn setup_env( - ws: &Workspace<'_>, - account: AccountOptions, - starknet: StarknetOptions, - world: WorldOptions, - name: Option<&String>, - env: Option<&Environment>, -) -> Result<( - Option, - SingleOwnerAccount, LocalWallet>, - String, -)> { - let ui = ws.config().ui(); - - let world_address = world.address(env).ok(); - - let (account, chain_id) = { - let provider = starknet.provider(env)?; - let chain_id = provider.chain_id().await?; - let chain_id = parse_cairo_short_string(&chain_id) - .with_context(|| "Cannot parse chain_id as string")?; - - let mut account = account.account(provider, env).await?; - account.set_block_id(BlockId::Tag(BlockTag::Pending)); - - let address = account.address(); - - ui.print(format!("\nMigration account: {address:#x}")); - if let Some(name) = name { - ui.print(format!("\nWorld name: {name}\n")); - } - - match account.provider().get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await { - Ok(_) => Ok((account, chain_id)), - Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { - Err(anyhow!("Account with address {:#x} doesn't exist.", account.address())) - } - Err(e) => Err(e.into()), - } - } - .with_context(|| "Problem initializing account for migration.")?; - - Ok((world_address, account, chain_id)) + Ok(migration_output) } async fn load_world_manifests( @@ -278,7 +284,7 @@ async fn load_world_manifests( account: &SingleOwnerAccount, world_address: Option, ui: &Ui, -) -> Result<(BaseManifest, Option)> +) -> Result<(BaseManifest, Option)> where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, @@ -286,19 +292,21 @@ where ui.print_step(1, "🌎", 
"Building World state..."); let mut local_manifest = - BaseManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(BASE_DIR))?; + BaseManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(BASE_DIR)) + .map_err(|_| anyhow!("Fail to load local manifest file."))?; let overlay_path = manifest_dir.join(MANIFESTS_DIR).join(OVERLAYS_DIR); if overlay_path.exists() { let overlay_manifest = - OverlayManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(OVERLAYS_DIR))?; + OverlayManifest::load_from_path(&manifest_dir.join(MANIFESTS_DIR).join(OVERLAYS_DIR)) + .map_err(|_| anyhow!("Fail to load overlay manifest file."))?; // merge user defined changes to base manifest local_manifest.merge(overlay_manifest); } let remote_manifest = if let Some(address) = world_address { - match DeployedManifest::load_from_remote(account.provider(), address).await { + match DeploymentManifest::load_from_remote(account.provider(), address).await { Ok(manifest) => { ui.print_sub(format!("Found remote World: {address:#x}")); Some(manifest) @@ -320,7 +328,7 @@ where Ok((local_manifest, remote_manifest)) } -fn prepare_migration( +pub fn prepare_migration( target_dir: &Utf8PathBuf, diff: WorldDiff, name: Option, @@ -357,34 +365,34 @@ fn prepare_migration( Ok(migration) } -// returns the Some(block number) at which migration world is deployed, returns none if world was -// not redeployed pub async fn execute_strategy( ws: &Workspace<'_>, strategy: &MigrationStrategy, migrator: &SingleOwnerAccount, - txn_config: Option, -) -> Result> + txn_config: Option, +) -> Result where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, { let ui = ws.config().ui(); + let mut world_tx_hash: Option = None; + let mut world_block_number: Option = None; match &strategy.base { Some(base) => { ui.print_header("# Base Contract"); - match base - .declare(migrator, txn_config.clone().map(|c| c.into()).unwrap_or_default()) - .await - { + match base.declare(migrator, txn_config.unwrap_or_default()).await { Ok(res) => { ui.print_sub(format!("Class Hash: {:#x}", res.class_hash)); } Err(MigrationError::ClassAlreadyDeclared) => { ui.print_sub(format!("Already declared: {:#x}", base.diff.local)); } + Err(MigrationError::ArtifactError(e)) => { + return Err(handle_artifact_error(&ui, base.artifact_path(), e)); + } Err(e) => { ui.verbose(format!("{e:?}")); return Err(e.into()); @@ -399,48 +407,66 @@ where ui.print_header("# World"); let calldata = vec![strategy.base.as_ref().unwrap().diff.local]; - deploy_contract(world, "world", calldata.clone(), migrator, &ui, &txn_config) - .await - .map_err(|e| { - ui.verbose(format!("{e:?}")); - anyhow!("Failed to deploy world: {e}") - })?; + let deploy_result = + deploy_contract(world, "world", calldata.clone(), migrator, &ui, &txn_config) + .await + .map_err(|e| { + ui.verbose(format!("{e:?}")); + anyhow!("Failed to deploy world: {e}") + })?; + + (world_tx_hash, world_block_number) = + if let ContractDeploymentOutput::Output(deploy_result) = deploy_result { + (Some(deploy_result.transaction_hash), deploy_result.block_number) + } else { + (None, None) + }; ui.print_sub(format!("Contract address: {:#x}", world.contract_address)); - let metadata = dojo_metadata_from_workspace(ws); - if let Some(meta) = metadata.as_ref().and_then(|inner| inner.world()) { - match meta.upload().await { - Ok(hash) => { - let mut encoded_uri = cairo_utils::encode_uri(&format!("ipfs://{hash}"))?; - - // Metadata is expecting an array of capacity 3. 
- if encoded_uri.len() < 3 { - encoded_uri.extend(vec![FieldElement::ZERO; 3 - encoded_uri.len()]); + let offline = ws.config().offline(); + + if offline { + ui.print_sub("Skipping metadata upload because of offline mode"); + } else { + let metadata = dojo_metadata_from_workspace(ws); + if let Some(meta) = metadata.as_ref().and_then(|inner| inner.world()) { + match meta.upload().await { + Ok(hash) => { + let mut encoded_uri = + cairo_utils::encode_uri(&format!("ipfs://{hash}"))?; + + // Metadata is expecting an array of capacity 3. + if encoded_uri.len() < 3 { + encoded_uri.extend(vec![FieldElement::ZERO; 3 - encoded_uri.len()]); + } + + let world_metadata = ResourceMetadata { + resource_id: FieldElement::ZERO, + metadata_uri: encoded_uri, + }; + + let InvokeTransactionResult { transaction_hash } = + WorldContract::new(world.contract_address, migrator) + .set_metadata(&world_metadata) + .send() + .await + .map_err(|e| { + ui.verbose(format!("{e:?}")); + anyhow!("Failed to set World metadata: {e}") + })?; + + TransactionWaiter::new(transaction_hash, migrator.provider()).await?; + + ui.print_sub(format!( + "Set Metadata transaction: {:#x}", + transaction_hash + )); + ui.print_sub(format!("Metadata uri: ipfs://{hash}")); + } + Err(err) => { + ui.print_sub(format!("Failed to set World metadata:\n{err}")); } - - let world_metadata = ResourceMetadata { - resource_id: FieldElement::ZERO, - metadata_uri: encoded_uri, - }; - - let InvokeTransactionResult { transaction_hash } = - WorldContract::new(world.contract_address, migrator) - .set_metadata(&world_metadata) - .send() - .await - .map_err(|e| { - ui.verbose(format!("{e:?}")); - anyhow!("Failed to set World metadata: {e}") - })?; - - TransactionWaiter::new(transaction_hash, migrator.provider()).await?; - - ui.print_sub(format!("Set Metadata transaction: {:#x}", transaction_hash)); - ui.print_sub(format!("Metadata uri: ipfs://{hash}")); - } - Err(err) => { - ui.print_sub(format!("Failed to set World metadata:\n{err}")); } } } @@ -448,16 +474,34 @@ where None => {} }; + let mut migration_output = MigrationOutput { + world_address: strategy.world_address()?, + world_tx_hash, + world_block_number, + full: false, + }; + // Once Torii supports indexing arrays, we should declare and register the // ResourceMetadata model. 
- register_models(strategy, migrator, &ui, txn_config.clone()).await?; - deploy_contracts(strategy, migrator, &ui, txn_config).await?; + match register_models(strategy, migrator, &ui, txn_config).await { + Ok(_) => (), + Err(e) => { + ui.anyhow(&e); + return Ok(migration_output); + } + } + match deploy_contracts(strategy, migrator, &ui, txn_config).await { + Ok(_) => (), + Err(e) => { + ui.anyhow(&e); + return Ok(migration_output); + } + }; - // This gets current block numder if helpful - // let block_height = migrator.provider().block_number().await.ok(); + migration_output.full = true; - Ok(None) + Ok(migration_output) } enum ContractDeploymentOutput { @@ -471,19 +515,14 @@ async fn deploy_contract( constructor_calldata: Vec, migrator: &SingleOwnerAccount, ui: &Ui, - txn_config: &Option, + txn_config: &Option, ) -> Result where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, { match contract - .deploy( - contract.diff.local, - constructor_calldata, - migrator, - txn_config.clone().map(|c| c.into()).unwrap_or_default(), - ) + .deploy(contract.diff.local, constructor_calldata, migrator, txn_config.unwrap_or_default()) .await { Ok(val) => { @@ -501,6 +540,9 @@ where Err(MigrationError::ContractAlreadyDeployed(contract_address)) => { Ok(ContractDeploymentOutput::AlreadyDeployed(contract_address)) } + Err(MigrationError::ArtifactError(e)) => { + return Err(handle_artifact_error(ui, contract.artifact_path(), e)); + } Err(e) => { ui.verbose(format!("{e:?}")); Err(anyhow!("Failed to migrate {contract_id}: {e}")) @@ -512,7 +554,7 @@ async fn register_models( strategy: &MigrationStrategy, migrator: &SingleOwnerAccount, ui: &Ui, - txn_config: Option, + txn_config: Option, ) -> Result> where P: Provider + Sync + Send + 'static, @@ -531,8 +573,7 @@ where for c in models.iter() { ui.print(italic_message(&c.diff.name).to_string()); - let res = - c.declare(migrator, txn_config.clone().map(|c| c.into()).unwrap_or_default()).await; + let res = c.declare(migrator, txn_config.unwrap_or_default()).await; match res { Ok(output) => { ui.print_hidden_sub(format!("Declare transaction: {:#x}", output.transaction_hash)); @@ -545,6 +586,9 @@ where ui.print_sub(format!("Already declared: {:#x}", c.diff.local)); continue; } + Err(MigrationError::ArtifactError(e)) => { + return Err(handle_artifact_error(ui, c.artifact_path(), e)); + } Err(e) => { ui.verbose(format!("{e:?}")); bail!("Failed to declare model {}: {e}", c.diff.name) @@ -579,7 +623,7 @@ async fn deploy_contracts( strategy: &MigrationStrategy, migrator: &SingleOwnerAccount, ui: &Ui, - txn_config: Option, + txn_config: Option, ) -> Result>> where P: Provider + Sync + Send + 'static, @@ -605,7 +649,7 @@ where world_address, contract.diff.local, migrator, - txn_config.clone().map(|c| c.into()).unwrap_or_default(), + txn_config.unwrap_or_default(), ) .await { @@ -625,6 +669,9 @@ where ui.print_sub(format!("Already deployed: {:#x}", contract_address)); deploy_output.push(None); } + Err(MigrationError::ArtifactError(e)) => { + return Err(handle_artifact_error(ui, contract.artifact_path(), e)); + } Err(e) => { ui.verbose(format!("{e:?}")); return Err(anyhow!("Failed to migrate {name}: {e}")); @@ -634,3 +681,89 @@ where Ok(deploy_output) } + +pub fn handle_artifact_error(ui: &Ui, artifact_path: &Path, error: anyhow::Error) -> anyhow::Error { + let path = artifact_path.to_string_lossy(); + let name = artifact_path.file_name().unwrap().to_string_lossy(); + ui.verbose(format!("{path}: {error:?}")); + + anyhow!( + "Discrepancy detected in 
{name}.\nUse `sozo clean` to clean your project or `sozo clean \ + --artifacts` to clean artifacts only.\nThen, rebuild your project with `sozo build`." + ) +} + +pub async fn get_contract_operation_name
<P>
( + provider: &P, + contract: &ContractMigration, + world_address: Option, +) -> String +where + P: Provider + Sync + Send + 'static, +{ + if let Some(world_address) = world_address { + if let Ok(base_class_hash) = provider + .call( + FunctionCall { + contract_address: world_address, + calldata: vec![], + entry_point_selector: get_selector_from_name("base").unwrap(), + }, + BlockId::Tag(BlockTag::Pending), + ) + .await + { + let contract_address = + get_contract_address(contract.salt, base_class_hash[0], &[], world_address); + + match provider + .get_class_hash_at(BlockId::Tag(BlockTag::Pending), contract_address) + .await + { + Ok(current_class_hash) if current_class_hash != contract.diff.local => { + return format!("upgrade {}", contract.diff.name); + } + Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { + return format!("deploy {}", contract.diff.name); + } + Ok(_) => return "already deployed".to_string(), + Err(_) => return format!("deploy {}", contract.diff.name), + } + } + } + format!("deploy {}", contract.diff.name) +} + +pub async fn print_strategy
<P>
(ui: &Ui, provider: &P, strategy: &MigrationStrategy) +where + P: Provider + Sync + Send + 'static, +{ + ui.print("\n📋 Migration Strategy\n"); + + if let Some(base) = &strategy.base { + ui.print_header("# Base Contract"); + ui.print_sub(format!("declare (class hash: {:#x})\n", base.diff.local)); + } + + if let Some(world) = &strategy.world { + ui.print_header("# World"); + ui.print_sub(format!("declare (class hash: {:#x})\n", world.diff.local)); + } + + if !&strategy.models.is_empty() { + ui.print_header(format!("# Models ({})", &strategy.models.len())); + for m in &strategy.models { + ui.print_sub(format!("register {} (class hash: {:#x})", m.diff.name, m.diff.local)); + } + ui.print(" "); + } + + if !&strategy.contracts.is_empty() { + ui.print_header(format!("# Contracts ({})", &strategy.contracts.len())); + for c in &strategy.contracts { + let op_name = get_contract_operation_name(provider, c, strategy.world_address).await; + ui.print_sub(format!("{op_name} (class hash: {:#x})", c.diff.local)); + } + ui.print(" "); + } +} diff --git a/bin/sozo/src/ops/migration/ui.rs b/crates/sozo/ops/src/migration/ui.rs similarity index 100% rename from bin/sozo/src/ops/migration/ui.rs rename to crates/sozo/ops/src/migration/ui.rs diff --git a/crates/sozo/ops/src/model.rs b/crates/sozo/ops/src/model.rs new file mode 100644 index 0000000000..ffec8c6f28 --- /dev/null +++ b/crates/sozo/ops/src/model.rs @@ -0,0 +1,74 @@ +use anyhow::Result; +use dojo_world::contracts::model::ModelReader; +use dojo_world::contracts::world::WorldContractReader; +use starknet::core::types::{BlockId, BlockTag, FieldElement}; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; + +pub async fn model_class_hash( + name: String, + world_address: FieldElement, + provider: JsonRpcClient, +) -> Result<()> { + let world = WorldContractReader::new(world_address, &provider) + .with_block(BlockId::Tag(BlockTag::Pending)); + + let model = world.model_reader(&name).await?; + + println!("{:#x}", model.class_hash()); + + Ok(()) +} + +pub async fn model_contract_address( + name: String, + world_address: FieldElement, + provider: JsonRpcClient, +) -> Result<()> { + let world = WorldContractReader::new(world_address, &provider) + .with_block(BlockId::Tag(BlockTag::Pending)); + + let model = world.model_reader(&name).await?; + + println!("{:#x}", model.contract_address()); + + Ok(()) +} + +pub async fn model_schema( + name: String, + world_address: FieldElement, + provider: JsonRpcClient, + to_json: bool, +) -> Result<()> { + let world = WorldContractReader::new(world_address, &provider) + .with_block(BlockId::Tag(BlockTag::Pending)); + + let model = world.model_reader(&name).await?; + let schema = model.schema().await?; + + if to_json { + println!("{}", serde_json::to_string_pretty(&schema)?) 
+ } else { + println!("{schema}"); + } + + Ok(()) +} + +pub async fn model_get( + name: String, + keys: Vec, + world_address: FieldElement, + provider: JsonRpcClient, +) -> Result<()> { + let world = WorldContractReader::new(world_address, &provider) + .with_block(BlockId::Tag(BlockTag::Pending)); + + let model = world.model_reader(&name).await?; + let entity = model.entity(&keys).await?; + + println!("{entity}"); + + Ok(()) +} diff --git a/crates/sozo/ops/src/register.rs b/crates/sozo/ops/src/register.rs new file mode 100644 index 0000000000..882ca55c8a --- /dev/null +++ b/crates/sozo/ops/src/register.rs @@ -0,0 +1,78 @@ +use std::collections::HashMap; + +use anyhow::{Context, Result}; +use dojo_world::contracts::model::ModelReader; +use dojo_world::contracts::{WorldContract, WorldContractReader}; +use dojo_world::manifest::DeploymentManifest; +use dojo_world::migration::TxConfig; +use scarb::core::Config; +use starknet::accounts::ConnectedAccount; +use starknet::providers::Provider; +use starknet_crypto::FieldElement; + +use crate::utils::handle_transaction_result; + +pub async fn model_register( + models: Vec, + world: &WorldContract, + transaction: TxConfig, + world_reader: WorldContractReader
<P>
, + world_address: FieldElement, + config: &Config, +) -> Result<()> +where + A: ConnectedAccount + Sync + Send + 'static, + P: Provider + Sync + Send, +{ + let manifest = { + match DeploymentManifest::load_from_remote(&world.account.provider(), world_address).await { + Ok(manifest) => manifest, + Err(e) => { + return Err(anyhow::anyhow!("Failed to build remote World state: {e}")); + } + } + }; + + let registered_models_names = manifest.models.iter().map(|m| m.name.as_str()); + let mut model_class_hashes = HashMap::new(); + for model_name in registered_models_names { + let read_model = world_reader.model_reader(model_name).await?; + let class_hash = read_model.class_hash(); + model_class_hashes.insert(class_hash, model_name); + } + + let mut models_to_register = Vec::new(); + for input_model in models { + if let Some(model_name) = model_class_hashes.get(&input_model) { + config.ui().print(format!( + "\"{}\" model already registered with the class hash \"{:#x}\"", + model_name, input_model + )); + } else { + models_to_register.push(input_model); + } + } + + if models_to_register.is_empty() { + config.ui().print("No new models to register."); + return Ok(()); + } + + let calls = models_to_register + .iter() + .map(|c| world.register_model_getcall(&(*c).into())) + .collect::>(); + + let res = + world.account.execute(calls).send().await.with_context(|| "Failed to send transaction")?; + + handle_transaction_result( + &world.account.provider(), + res, + transaction.wait, + transaction.receipt, + ) + .await?; + + Ok(()) +} diff --git a/crates/sozo/ops/src/utils.rs b/crates/sozo/ops/src/utils.rs new file mode 100644 index 0000000000..a2aaf99f96 --- /dev/null +++ b/crates/sozo/ops/src/utils.rs @@ -0,0 +1,37 @@ +use anyhow::Result; +use dojo_world::utils::{execution_status_from_maybe_pending_receipt, TransactionWaiter}; +use starknet::core::types::{ExecutionResult, InvokeTransactionResult}; +use starknet::providers::Provider; + +pub async fn handle_transaction_result
<P>
( + provider: P, + transaction_result: InvokeTransactionResult, + wait_for_tx: bool, + show_receipt: bool, +) -> Result<()> +where + P: Provider + Send, +{ + println!("Transaction hash: {:#x}", transaction_result.transaction_hash); + + if wait_for_tx { + let receipt = + TransactionWaiter::new(transaction_result.transaction_hash, &provider).await?; + + if show_receipt { + println!("Receipt:\n{}", serde_json::to_string_pretty(&receipt)?); + } else { + match execution_status_from_maybe_pending_receipt(&receipt) { + ExecutionResult::Succeeded => { + println!("Status: OK"); + } + ExecutionResult::Reverted { reason } => { + println!("Status: REVERTED"); + println!("Reason:\n{}", reason); + } + }; + } + } + + Ok(()) +} diff --git a/crates/torii/client/src/client/mod.rs b/crates/torii/client/src/client/mod.rs index 07c96bf77c..aad72ef099 100644 --- a/crates/torii/client/src/client/mod.rs +++ b/crates/torii/client/src/client/mod.rs @@ -10,8 +10,6 @@ use dojo_types::packing::unpack; use dojo_types::schema::Ty; use dojo_types::WorldMetadata; use dojo_world::contracts::WorldContractReader; -use futures::channel::mpsc::UnboundedReceiver; -use futures_util::lock::Mutex; use parking_lot::{RwLock, RwLockReadGuard}; use starknet::core::utils::cairo_short_string_to_felt; use starknet::providers::jsonrpc::HttpTransport; @@ -22,7 +20,7 @@ use torii_grpc::client::{EntityUpdateStreaming, ModelDiffsStreaming}; use torii_grpc::proto::world::RetrieveEntitiesResponse; use torii_grpc::types::schema::Entity; use torii_grpc::types::{KeysClause, Query}; -use torii_relay::client::{EventLoop, Message}; +use torii_relay::types::Message; use crate::client::error::{Error, ParseError}; use crate::client::storage::ModelStorage; @@ -106,41 +104,17 @@ impl Client { self.relay_client.command_sender.wait_for_relay().await.map_err(Error::RelayClient) } - /// Subscribes to a topic. - /// Returns true if the topic was subscribed to. - /// Returns false if the topic was already subscribed to. - pub async fn subscribe_topic(&mut self, topic: String) -> Result { - self.relay_client.command_sender.subscribe(topic).await.map_err(Error::RelayClient) - } - - /// Unsubscribes from a topic. - /// Returns true if the topic was subscribed to. - pub async fn unsubscribe_topic(&mut self, topic: String) -> Result { - self.relay_client.command_sender.unsubscribe(topic).await.map_err(Error::RelayClient) - } - /// Publishes a message to a topic. /// Returns the message id. - pub async fn publish_message(&mut self, topic: &str, message: &[u8]) -> Result, Error> { + pub async fn publish_message(&mut self, message: Message) -> Result, Error> { self.relay_client .command_sender - .publish(topic.to_string(), message.to_vec()) + .publish(message) .await .map_err(Error::RelayClient) .map(|m| m.0) } - /// Returns the event loop of the relay client. - /// Which can then be used to run the relay client - pub fn relay_client_runner(&self) -> Arc> { - self.relay_client.event_loop.clone() - } - - /// Returns the message receiver of the relay client. - pub fn relay_client_stream(&self) -> Arc>> { - self.relay_client.message_receiver.clone() - } - /// Returns a read lock on the World metadata that the client is connected to. 
pub fn metadata(&self) -> RwLockReadGuard<'_, WorldMetadata> { self.metadata.read() diff --git a/crates/torii/core/Cargo.toml b/crates/torii/core/Cargo.toml index 8baed226c5..6faaeb040e 100644 --- a/crates/torii/core/Cargo.toml +++ b/crates/torii/core/Cargo.toml @@ -35,6 +35,7 @@ tokio = { version = "1.32.0", features = [ "sync" ], default-features = true } tokio-stream = "0.1.11" tokio-util = "0.7.7" tracing.workspace = true +sozo-ops.workspace = true [dev-dependencies] camino.workspace = true diff --git a/crates/torii/core/src/engine.rs b/crates/torii/core/src/engine.rs index 01e9d82acf..aad30273ca 100644 --- a/crates/torii/core/src/engine.rs +++ b/crates/torii/core/src/engine.rs @@ -3,8 +3,8 @@ use std::time::Duration; use anyhow::Result; use dojo_world::contracts::world::WorldContractReader; use starknet::core::types::{ - BlockId, BlockWithTxs, Event, InvokeTransaction, MaybePendingBlockWithTxs, - MaybePendingTransactionReceipt, Transaction, TransactionReceipt, + BlockId, EmittedEvent, Event, EventFilter, MaybePendingTransactionReceipt, Transaction, + TransactionReceipt, }; use starknet::core::utils::get_selector_from_name; use starknet::providers::Provider; @@ -33,17 +33,18 @@ impl Default for Processors
<P>
{ pub struct EngineConfig { pub block_time: Duration, pub start_block: u64, + pub events_chunk_size: u64, } impl Default for EngineConfig { fn default() -> Self { - Self { block_time: Duration::from_secs(1), start_block: 0 } + Self { block_time: Duration::from_secs(1), start_block: 0, events_chunk_size: 1000 } } } -pub struct Engine<'db, P: Provider + Sync> { +pub struct Engine { world: WorldContractReader
<P>
, - db: &'db mut Sql, + db: Sql, provider: Box
<P>
, processors: Processors
<P>
, config: EngineConfig, @@ -56,10 +57,10 @@ struct UnprocessedEvent { data: Vec, } -impl<'db, P: Provider + Sync> Engine<'db, P> { +impl Engine
<P>
{ pub fn new( world: WorldContractReader
<P>
, - db: &'db mut Sql, + db: Sql, provider: P, processors: Processors
<P>
, config: EngineConfig, @@ -119,67 +120,65 @@ impl<'db, P: Provider + Sync> Engine<'db, P> { Ok(latest_block_number) } - pub async fn sync_range(&mut self, mut from: u64, to: u64) -> Result<()> { + pub async fn sync_range(&mut self, from: u64, to: u64) -> Result<()> { // Process all blocks from current to latest. - while from <= to { - let block_with_txs = match self.provider.get_block_with_txs(BlockId::Number(from)).await - { - Ok(block_with_txs) => block_with_txs, - Err(e) => { - error!("getting block: {}", e); - continue; - } - }; + let get_events = |token: Option| { + self.provider.get_events( + EventFilter { + from_block: Some(BlockId::Number(from)), + to_block: Some(BlockId::Number(to)), + address: Some(self.world.address), + keys: None, + }, + token, + self.config.events_chunk_size, + ) + }; - // send the current block number - if let Some(ref block_tx) = self.block_tx { - block_tx.send(from).await.expect("failed to send block number to gRPC server"); - } + // handle next events pages + let mut events_pages = vec![get_events(None).await?]; - match self.process(block_with_txs).await { - Ok(_) => { - self.db.set_head(from); - self.db.execute().await?; - from += 1; - } - Err(e) => { - error!("processing block: {}", e); - continue; - } + while let Some(token) = &events_pages.last().unwrap().continuation_token { + events_pages.push(get_events(Some(token.clone())).await?); + } + + let mut last_block: u64 = 0; + for events_page in events_pages { + for event in events_page.events { + self.process(event, &mut last_block).await?; } } + self.db.execute().await?; + Ok(()) } - async fn process(&mut self, block: MaybePendingBlockWithTxs) -> Result<()> { - let block: BlockWithTxs = match block { - MaybePendingBlockWithTxs::Block(block) => block, - _ => return Ok(()), + async fn process(&mut self, event: EmittedEvent, last_block: &mut u64) -> Result<()> { + let block_number = match event.block_number { + Some(block_number) => block_number, + None => { + error!("event without block number"); + return Ok(()); + } }; - Self::process_block(self, &block).await?; + if block_number > *last_block { + *last_block = block_number; - for (tx_idx, transaction) in block.clone().transactions.iter().enumerate() { - let transaction_hash = match transaction { - Transaction::Invoke(invoke_transaction) => { - if let InvokeTransaction::V1(invoke_transaction) = invoke_transaction { - invoke_transaction.transaction_hash - } else { - continue; - } - } - Transaction::L1Handler(l1_handler_transaction) => { - l1_handler_transaction.transaction_hash - } - _ => continue, - }; + if let Some(ref block_tx) = self.block_tx { + block_tx.send(block_number).await?; + } - self.process_transaction_and_receipt(transaction_hash, transaction, &block, tx_idx) - .await?; + Self::process_block(self, block_number, event.block_hash.unwrap()).await?; + info!(target: "torii_core::engine", block_number = %block_number, "Processed block"); + + self.db.set_head(block_number); } - info!("processed block: {}", block.block_number); + let transaction = self.provider.get_transaction_by_hash(event.transaction_hash).await?; + self.process_transaction_and_receipt(event.transaction_hash, &transaction, block_number) + .await?; Ok(()) } @@ -188,8 +187,7 @@ impl<'db, P: Provider + Sync> Engine<'db, P> { &mut self, transaction_hash: FieldElement, transaction: &Transaction, - block: &BlockWithTxs, - tx_idx: usize, + block_number: u64, ) -> Result<()> { let receipt = match self.provider.get_transaction_receipt(transaction_hash).await { Ok(receipt) => match 
receipt { @@ -222,45 +220,51 @@ impl<'db, P: Provider + Sync> Engine<'db, P> { world_event = true; let event_id = - format!("0x{:064x}:0x{:04x}:0x{:04x}", block.block_number, tx_idx, event_idx); + format!("{:#064x}:{:#x}:{:#04x}", block_number, transaction_hash, event_idx); - Self::process_event(self, block, &receipt, &event_id, event).await?; + Self::process_event(self, block_number, &receipt, &event_id, event).await?; } if world_event { - let transaction_id = format!("0x{:064x}:0x{:04x}", block.block_number, tx_idx); - - Self::process_transaction(self, block, &receipt, &transaction_id, transaction) - .await?; + Self::process_transaction( + self, + block_number, + &receipt, + transaction_hash, + transaction, + ) + .await?; } } Ok(()) } - async fn process_block(&mut self, block: &BlockWithTxs) -> Result<()> { + async fn process_block(&mut self, block_number: u64, block_hash: FieldElement) -> Result<()> { for processor in &self.processors.block { - processor.process(self.db, self.provider.as_ref(), block).await?; + processor + .process(&mut self.db, self.provider.as_ref(), block_number, block_hash) + .await?; } Ok(()) } async fn process_transaction( &mut self, - block: &BlockWithTxs, + block_number: u64, transaction_receipt: &TransactionReceipt, - transaction_id: &str, + transaction_hash: FieldElement, transaction: &Transaction, ) -> Result<()> { for processor in &self.processors.transaction { processor .process( - self.db, + &mut self.db, self.provider.as_ref(), - block, + block_number, transaction_receipt, + transaction_hash, transaction, - transaction_id, ) .await? } @@ -270,7 +274,7 @@ impl<'db, P: Provider + Sync> Engine<'db, P> { async fn process_event( &mut self, - block: &BlockWithTxs, + block_number: u64, transaction_receipt: &TransactionReceipt, event_id: &str, event: &Event, @@ -284,11 +288,21 @@ impl<'db, P: Provider + Sync> Engine<'db, P> { }; self.db.store_event(event_id, event, transaction_hash); for processor in &self.processors.event { - if get_selector_from_name(&processor.event_key())? == event.keys[0] + // If the processor has no event_key, means it's a catch-all processor. + // We also validate the event + if (processor.event_key().is_empty() + || get_selector_from_name(&processor.event_key())? 
== event.keys[0]) && processor.validate(event) { processor - .process(&self.world, self.db, block, transaction_receipt, event_id, event) + .process( + &self.world, + &mut self.db, + block_number, + transaction_receipt, + event_id, + event, + ) .await?; } else { let unprocessed_event = UnprocessedEvent { diff --git a/crates/torii/core/src/error.rs b/crates/torii/core/src/error.rs index 43f8b628be..d58898a7a8 100644 --- a/crates/torii/core/src/error.rs +++ b/crates/torii/core/src/error.rs @@ -3,7 +3,7 @@ use std::num::ParseIntError; use dojo_types::primitive::PrimitiveError; use dojo_types::schema::EnumError; use starknet::core::types::{FromByteSliceError, FromStrError}; -use starknet::core::utils::CairoShortStringToFeltError; +use starknet::core::utils::{CairoShortStringToFeltError, NonAsciiNameError}; #[derive(Debug, thiserror::Error)] pub enum Error { @@ -21,6 +21,8 @@ pub enum Error { #[derive(Debug, thiserror::Error)] pub enum ParseError { + #[error(transparent)] + NonAsciiName(#[from] NonAsciiNameError), #[error(transparent)] FromStr(#[from] FromStrError), #[error(transparent)] diff --git a/crates/torii/core/src/model.rs b/crates/torii/core/src/model.rs index 7b3d0a4fd6..59b3fe1044 100644 --- a/crates/torii/core/src/model.rs +++ b/crates/torii/core/src/model.rs @@ -8,6 +8,7 @@ use dojo_world::contracts::model::ModelReader; use sqlx::sqlite::SqliteRow; use sqlx::{Pool, Row, Sqlite}; use starknet::core::types::FieldElement; +use starknet::core::utils::get_selector_from_name; use super::error::{self, Error}; use crate::error::{ParseError, QueryError}; @@ -59,6 +60,10 @@ impl ModelSQLReader { #[cfg_attr(not(target_arch = "wasm32"), async_trait)] #[cfg_attr(target_arch = "wasm32", async_trait(?Send))] impl ModelReader for ModelSQLReader { + fn name(&self) -> String { + self.name.to_string() + } + fn class_hash(&self) -> FieldElement { self.class_hash } @@ -68,11 +73,15 @@ impl ModelReader for ModelSQLReader { } async fn schema(&self) -> Result { + // this is temporary until the hash for the model name is precomputed + let model_selector = + get_selector_from_name(&self.name).map_err(error::ParseError::NonAsciiName)?; + let model_members: Vec = sqlx::query_as( "SELECT id, model_idx, member_idx, name, type, type_enum, enum_options, key FROM \ model_members WHERE model_id = ? ORDER BY model_idx ASC, member_idx ASC", ) - .bind(self.name.clone()) + .bind(format!("{:#x}", model_selector)) .fetch_all(&self.pool) .await?; diff --git a/crates/torii/core/src/processors/event_message.rs b/crates/torii/core/src/processors/event_message.rs new file mode 100644 index 0000000000..c83a35930a --- /dev/null +++ b/crates/torii/core/src/processors/event_message.rs @@ -0,0 +1,65 @@ +use anyhow::{Error, Result}; +use async_trait::async_trait; +use dojo_world::contracts::model::ModelReader; +use dojo_world::contracts::world::WorldContractReader; +use starknet::core::types::{Event, TransactionReceipt}; +use starknet::providers::Provider; +use tracing::info; + +use super::EventProcessor; +use crate::processors::MODEL_INDEX; +use crate::sql::Sql; + +#[derive(Default)] +pub struct EventMessageProcessor; + +#[async_trait] +impl

<P> EventProcessor<P>

for EventMessageProcessor +where + P: Provider + Send + Sync, +{ + fn event_key(&self) -> String { + "".to_string() + } + + fn validate(&self, event: &Event) -> bool { + // we expect at least 3 keys + // 1: event selector + // 2: model keys, arbitrary length + // last key: system key + if event.keys.len() < 3 { + return false; + } + + true + } + + async fn process( + &self, + _world: &WorldContractReader

, + db: &mut Sql, + _block_number: u64, + _transaction_receipt: &TransactionReceipt, + event_id: &str, + event: &Event, + ) -> Result<(), Error> { + // silently ignore if the model is not found + let model = match db.model(&format!("{:#x}", event.keys[MODEL_INDEX])).await { + Ok(model) => model, + Err(_) => return Ok(()), + }; + + info!("store event message: {}", model.name()); + + // skip the first key, as it's the event selector + // and don't include the last key, as it's the system key + let mut keys_and_unpacked = + [event.keys[1..event.keys.len() - 1].to_vec(), event.data.clone()].concat(); + + let mut entity = model.schema().await?; + entity.deserialize(&mut keys_and_unpacked)?; + + db.set_event_message(entity, event_id).await?; + Ok(()) + } +} diff --git a/crates/torii/core/src/processors/metadata_update.rs b/crates/torii/core/src/processors/metadata_update.rs index 0135df0a73..2e94ccd0c8 100644 --- a/crates/torii/core/src/processors/metadata_update.rs +++ b/crates/torii/core/src/processors/metadata_update.rs @@ -7,7 +7,7 @@ use base64::Engine as _; use dojo_world::contracts::world::WorldContractReader; use dojo_world::metadata::{Uri, WorldMetadata}; use reqwest::Client; -use starknet::core::types::{BlockWithTxs, Event, TransactionReceipt}; +use starknet::core::types::{Event, TransactionReceipt}; use starknet::core::utils::parse_cairo_short_string; use starknet::providers::Provider; use starknet_crypto::FieldElement; @@ -48,7 +48,7 @@ where &self, _world: &WorldContractReader

, db: &mut Sql, - _block: &BlockWithTxs, + _block_number: u64, _transaction_receipt: &TransactionReceipt, _event_id: &str, event: &Event, diff --git a/crates/torii/core/src/processors/mod.rs b/crates/torii/core/src/processors/mod.rs index 3957fc5f1c..1d12817655 100644 --- a/crates/torii/core/src/processors/mod.rs +++ b/crates/torii/core/src/processors/mod.rs @@ -1,11 +1,13 @@ use anyhow::{Error, Result}; use async_trait::async_trait; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{BlockWithTxs, Event, Transaction, TransactionReceipt}; +use starknet::core::types::{Event, Transaction, TransactionReceipt}; use starknet::providers::Provider; +use starknet_crypto::FieldElement; use crate::sql::Sql; +pub mod event_message; pub mod metadata_update; pub mod register_model; pub mod store_del_record; @@ -33,7 +35,7 @@ where &self, world: &WorldContractReader

, db: &mut Sql, - block: &BlockWithTxs, + block_number: u64, transaction_receipt: &TransactionReceipt, event_id: &str, event: &Event, @@ -43,7 +45,13 @@ where #[async_trait] pub trait BlockProcessor { fn get_block_number(&self) -> String; - async fn process(&self, db: &mut Sql, provider: &P, block: &BlockWithTxs) -> Result<(), Error>; + async fn process( + &self, + db: &mut Sql, + provider: &P, + block_number: u64, + block_hash: FieldElement, + ) -> Result<(), Error>; } #[async_trait] @@ -52,9 +60,9 @@ pub trait TransactionProcessor { &self, db: &mut Sql, provider: &P, - block: &BlockWithTxs, + block_number: u64, transaction_receipt: &TransactionReceipt, + transaction_hash: FieldElement, transaction: &Transaction, - transaction_id: &str, ) -> Result<(), Error>; } diff --git a/crates/torii/core/src/processors/register_model.rs b/crates/torii/core/src/processors/register_model.rs index d89f21c981..fd2f14b8dc 100644 --- a/crates/torii/core/src/processors/register_model.rs +++ b/crates/torii/core/src/processors/register_model.rs @@ -2,7 +2,7 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{BlockWithTxs, Event, TransactionReceipt}; +use starknet::core::types::{Event, TransactionReceipt}; use starknet::core::utils::parse_cairo_short_string; use starknet::providers::Provider; use tracing::{debug, info}; @@ -38,7 +38,7 @@ where &self, world: &WorldContractReader

, db: &mut Sql, - _block: &BlockWithTxs, + _block_number: u64, _transaction_receipt: &TransactionReceipt, _event_id: &str, event: &Event, diff --git a/crates/torii/core/src/processors/store_del_record.rs b/crates/torii/core/src/processors/store_del_record.rs index fdd29d3c7a..26d5272148 100644 --- a/crates/torii/core/src/processors/store_del_record.rs +++ b/crates/torii/core/src/processors/store_del_record.rs @@ -2,8 +2,8 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{BlockWithTxs, Event, TransactionReceipt}; -use starknet::core::utils::parse_cairo_short_string; +use starknet::core::types::{Event, TransactionReceipt}; +use starknet::core::utils::{get_selector_from_name, parse_cairo_short_string}; use starknet::providers::Provider; use tracing::info; @@ -39,7 +39,7 @@ where &self, _world: &WorldContractReader

, db: &mut Sql, - _block: &BlockWithTxs, + _block_number: u64, _transaction_receipt: &TransactionReceipt, _event_id: &str, event: &Event, @@ -47,7 +47,8 @@ where let name = parse_cairo_short_string(&event.data[MODEL_INDEX])?; info!("store delete record: {}", name); - let model = db.model(&name).await?; + // this is temporary until the model name hash is precomputed + let model = db.model(&format!("{:#x}", get_selector_from_name(&name)?)).await?; let keys_start = NUM_KEYS_INDEX + 1; let keys = event.data[keys_start..].to_vec(); diff --git a/crates/torii/core/src/processors/store_set_record.rs b/crates/torii/core/src/processors/store_set_record.rs index 35c2da8055..e5379464d6 100644 --- a/crates/torii/core/src/processors/store_set_record.rs +++ b/crates/torii/core/src/processors/store_set_record.rs @@ -2,8 +2,8 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{BlockWithTxs, Event, TransactionReceipt}; -use starknet::core::utils::parse_cairo_short_string; +use starknet::core::types::{Event, TransactionReceipt}; +use starknet::core::utils::{get_selector_from_name, parse_cairo_short_string}; use starknet::providers::Provider; use tracing::info; @@ -39,7 +39,7 @@ where &self, _world: &WorldContractReader

, db: &mut Sql, - _block: &BlockWithTxs, + _block_number: u64, _transaction_receipt: &TransactionReceipt, event_id: &str, event: &Event, @@ -47,7 +47,8 @@ where let name = parse_cairo_short_string(&event.data[MODEL_INDEX])?; info!("store set record: {}", name); - let model = db.model(&name).await?; + // this is temporary until the model name hash is precomputed + let model = db.model(&format!("{:#x}", get_selector_from_name(&name)?)).await?; let keys_start = NUM_KEYS_INDEX + 1; let keys_end: usize = keys_start + usize::from(u8::try_from(event.data[NUM_KEYS_INDEX])?); diff --git a/crates/torii/core/src/processors/store_transaction.rs b/crates/torii/core/src/processors/store_transaction.rs index 8bf30d1d17..3caf2c7654 100644 --- a/crates/torii/core/src/processors/store_transaction.rs +++ b/crates/torii/core/src/processors/store_transaction.rs @@ -1,7 +1,8 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; -use starknet::core::types::{BlockWithTxs, Transaction, TransactionReceipt}; +use starknet::core::types::{Transaction, TransactionReceipt}; use starknet::providers::Provider; +use starknet_crypto::FieldElement; use super::TransactionProcessor; use crate::sql::Sql; @@ -15,12 +16,13 @@ impl TransactionProcessor

for StoreTransactionProcessor { &self, db: &mut Sql, _provider: &P, - _block: &BlockWithTxs, + block_number: u64, _receipt: &TransactionReceipt, + transaction_hash: FieldElement, transaction: &Transaction, - transaction_id: &str, ) -> Result<(), Error> { - db.store_transaction(transaction, transaction_id); + let transaction_id = format!("{:#064x}:{:#x}", block_number, transaction_hash); + db.store_transaction(transaction, &transaction_id); Ok(()) } diff --git a/crates/torii/core/src/sql.rs b/crates/torii/core/src/sql.rs index 6289ab2e0a..d0a406040e 100644 --- a/crates/torii/core/src/sql.rs +++ b/crates/torii/core/src/sql.rs @@ -9,6 +9,7 @@ use dojo_world::metadata::WorldMetadata; use sqlx::pool::PoolConnection; use sqlx::{Pool, Sqlite}; use starknet::core::types::{Event, FieldElement, InvokeTransaction, Transaction}; +use starknet::core::utils::get_selector_from_name; use starknet_crypto::poseidon_hash_many; use super::World; @@ -26,7 +27,7 @@ mod test; #[derive(Debug, Clone)] pub struct Sql { world_address: FieldElement, - pool: Pool, + pub pool: Pool, query_queue: QueryQueue, } @@ -95,7 +96,8 @@ impl Sql { layout=EXCLUDED.layout, packed_size=EXCLUDED.packed_size, \ unpacked_size=EXCLUDED.unpacked_size RETURNING *"; let model_registered: ModelRegistered = sqlx::query_as(insert_models) - .bind(model.name()) + // this is temporary until the model hash is precomputed + .bind(&format!("{:#x}", &get_selector_from_name(&model.name())?)) .bind(model.name()) .bind(format!("{class_hash:#x}")) .bind(format!("{contract_address:#x}")) @@ -129,7 +131,10 @@ impl Sql { self.query_queue.enqueue( "INSERT INTO entity_model (entity_id, model_id) VALUES (?, ?) ON CONFLICT(entity_id, \ model_id) DO NOTHING", - vec![Argument::String(entity_id.clone()), Argument::String(entity.name())], + vec![ + Argument::String(entity_id.clone()), + Argument::String(format!("{:#x}", get_selector_from_name(&entity.name())?)), + ], ); let keys_str = felts_sql_string(&keys); @@ -144,7 +149,48 @@ impl Sql { .await?; let path = vec![entity.name()]; - self.build_set_entity_queries_recursive(path, event_id, &entity_id, &entity); + self.build_set_entity_queries_recursive(path, event_id, &entity_id, &entity, false); + self.query_queue.execute_all().await?; + + SimpleBroker::publish(entity_updated); + + Ok(()) + } + + pub async fn set_event_message(&mut self, entity: Ty, event_id: &str) -> Result<()> { + let keys = if let Ty::Struct(s) = &entity { + let mut keys = Vec::new(); + for m in s.keys() { + keys.extend(m.serialize()?); + } + keys + } else { + return Err(anyhow!("Entity is not a struct")); + }; + + let entity_id = format!("{:#x}", poseidon_hash_many(&keys)); + self.query_queue.enqueue( + "INSERT INTO event_model (entity_id, model_id) VALUES (?, ?) ON CONFLICT(entity_id, \ + model_id) DO NOTHING", + vec![ + Argument::String(entity_id.clone()), + Argument::String(format!("{:#x}", get_selector_from_name(&entity.name())?)), + ], + ); + + let keys_str = felts_sql_string(&keys); + let insert_entities = "INSERT INTO event_messages (id, keys, event_id) VALUES (?, ?, ?) 
\ + ON CONFLICT(id) DO UPDATE SET updated_at=CURRENT_TIMESTAMP, \ + event_id=EXCLUDED.event_id RETURNING *"; + let entity_updated: EntityUpdated = sqlx::query_as(insert_entities) + .bind(&entity_id) + .bind(&keys_str) + .bind(event_id) + .fetch_one(&self.pool) + .await?; + + let path = vec![entity.name()]; + self.build_set_entity_queries_recursive(path, event_id, &entity_id, &entity, true); self.query_queue.execute_all().await?; SimpleBroker::publish(entity_updated); @@ -341,14 +387,28 @@ impl Sql { event_id: &str, entity_id: &str, entity: &Ty, + is_event_message: bool, ) { match entity { Ty::Struct(s) => { let table_id = path.join("$"); - let mut columns = vec!["entity_id".to_string(), "event_id".to_string()]; + let mut columns = vec![ + "id".to_string(), + "event_id".to_string(), + if is_event_message { + "event_message_id".to_string() + } else { + "entity_id".to_string() + }, + ]; let mut arguments = vec![ - Argument::String(entity_id.to_string()), + Argument::String(if is_event_message { + "event:".to_string() + entity_id + } else { + entity_id.to_string() + }), Argument::String(event_id.to_string()), + Argument::String(entity_id.to_string()), ]; for member in s.children.iter() { @@ -379,7 +439,11 @@ impl Sql { let mut path_clone = path.clone(); path_clone.push(member.name.clone()); self.build_set_entity_queries_recursive( - path_clone, event_id, entity_id, &member.ty, + path_clone, + event_id, + entity_id, + &member.ty, + is_event_message, ); } } @@ -389,7 +453,11 @@ impl Sql { let mut path_clone = path.clone(); path_clone.push(child.name.clone()); self.build_set_entity_queries_recursive( - path_clone, event_id, entity_id, &child.ty, + path_clone, + event_id, + entity_id, + &child.ty, + is_event_message, ); } } @@ -435,8 +503,8 @@ impl Sql { let mut indices = Vec::new(); let mut create_table_query = format!( - "CREATE TABLE IF NOT EXISTS [{table_id}] (entity_id TEXT NOT NULL PRIMARY KEY, \ - event_id, " + "CREATE TABLE IF NOT EXISTS [{table_id}] (id TEXT NOT NULL PRIMARY KEY, event_id TEXT \ + NOT NULL, entity_id TEXT, event_message_id TEXT, " ); if let Ty::Struct(s) = model { @@ -483,7 +551,11 @@ impl Sql { ?, ?, ?, ?, ?, ?, ?, ?)"; let arguments = vec![ Argument::String(table_id.clone()), - Argument::String(path[0].clone()), + // TEMP: this is temporary until the model hash is precomputed + Argument::String(format!( + "{:#x}", + get_selector_from_name(&path[0].clone()).unwrap() + )), Argument::Int(model_idx), Argument::Int(member_idx as i64), Argument::String(name), @@ -502,12 +574,15 @@ impl Sql { // If this is not the Model's root table, create a reference to the parent. 
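// [Editor's note - illustrative sketch, not part of the diff] With the column changes
// above, the DDL generated for a model's root member table now looks roughly like this
// ("Position" is just an example model name):
//
//   CREATE TABLE IF NOT EXISTS [Position] (
//       id TEXT NOT NULL PRIMARY KEY,
//       event_id TEXT NOT NULL,
//       entity_id TEXT,
//       event_message_id TEXT,
//       -- ...one column per model member...
//       FOREIGN KEY (entity_id) REFERENCES entities(id),
//       FOREIGN KEY (event_message_id) REFERENCES event_messages(id));
//
// A row can therefore back either a regular entity or an event message; event-message
// rows reuse the same member tables but are keyed with an "event:"-prefixed id, as set
// in build_set_entity_queries_recursive above. Non-root (nested) tables additionally
// reference their parent table, as built just below.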
if path.len() > 1 { let parent_table_id = path[..path.len() - 1].join("$"); - create_table_query.push_str(&format!( - "FOREIGN KEY (entity_id) REFERENCES {parent_table_id} (entity_id), " - )); + create_table_query + .push_str(&format!("FOREIGN KEY (id) REFERENCES {parent_table_id} (id), ")); }; - create_table_query.push_str("FOREIGN KEY (entity_id) REFERENCES entities(id));"); + create_table_query.push_str("FOREIGN KEY (entity_id) REFERENCES entities(id), "); + // create_table_query.push_str("FOREIGN KEY (event_id) REFERENCES events(id), "); + create_table_query + .push_str("FOREIGN KEY (event_message_id) REFERENCES event_messages(id));"); + self.query_queue.enqueue(create_table_query, vec![]); indices.iter().for_each(|s| { diff --git a/crates/torii/core/src/sql_test.rs b/crates/torii/core/src/sql_test.rs index 61e21f4bf7..69c1b44a4d 100644 --- a/crates/torii/core/src/sql_test.rs +++ b/crates/torii/core/src/sql_test.rs @@ -8,9 +8,10 @@ use dojo_test_utils::sequencer::{ use dojo_world::contracts::world::WorldContractReader; use dojo_world::migration::strategy::MigrationStrategy; use scarb::ops; -use sozo::ops::migration::execute_strategy; +use sozo_ops::migration::execute_strategy; use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; use starknet::core::types::{BlockId, BlockTag, Event, FieldElement}; +use starknet::core::utils::get_selector_from_name; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::{JsonRpcClient, Provider}; use tokio::sync::broadcast; @@ -22,11 +23,11 @@ use crate::sql::Sql; pub async fn bootstrap_engine

( world: WorldContractReader

, - db: &mut Sql, + db: Sql, provider: P, migration: MigrationStrategy, sequencer: TestSequencer, -) -> Result, Box> +) -> Result, Box> where P: Provider + Send + Sync, { @@ -72,31 +73,31 @@ async fn test_load_from_remote() { let world = WorldContractReader::new(migration.world_address().unwrap(), &provider); let mut db = Sql::new(pool.clone(), migration.world_address().unwrap()).await.unwrap(); - let _ = bootstrap_engine(world, &mut db, &provider, migration, sequencer).await; + let _ = bootstrap_engine(world, db.clone(), &provider, migration, sequencer).await; let models = sqlx::query("SELECT * FROM models").fetch_all(&pool).await.unwrap(); - assert_eq!(models.len(), 2); + assert_eq!(models.len(), 3); let (id, name, packed_size, unpacked_size): (String, String, u8, u8) = sqlx::query_as( - "SELECT id, name, packed_size, unpacked_size FROM models WHERE id = 'Position'", + "SELECT id, name, packed_size, unpacked_size FROM models WHERE name = 'Position'", ) .fetch_one(&pool) .await .unwrap(); - assert_eq!(id, "Position"); + assert_eq!(id, format!("{:#x}", get_selector_from_name("Position").unwrap())); assert_eq!(name, "Position"); assert_eq!(packed_size, 1); assert_eq!(unpacked_size, 2); let (id, name, packed_size, unpacked_size): (String, String, u8, u8) = sqlx::query_as( - "SELECT id, name, packed_size, unpacked_size FROM models WHERE id = 'Moves'", + "SELECT id, name, packed_size, unpacked_size FROM models WHERE name = 'Moves'", ) .fetch_one(&pool) .await .unwrap(); - assert_eq!(id, "Moves"); + assert_eq!(id, format!("{:#x}", get_selector_from_name("Moves").unwrap())); assert_eq!(name, "Moves"); assert_eq!(packed_size, 1); assert_eq!(unpacked_size, 2); diff --git a/crates/torii/graphql/Cargo.toml b/crates/torii/graphql/Cargo.toml index fed37b5f4a..d984c87047 100644 --- a/crates/torii/graphql/Cargo.toml +++ b/crates/torii/graphql/Cargo.toml @@ -32,8 +32,10 @@ tokio.workspace = true toml.workspace = true torii-core = { path = "../core" } tracing.workspace = true +regex.workspace = true url.workspace = true warp.workspace = true +sozo-ops.workspace = true [dev-dependencies] camino.workspace = true diff --git a/crates/torii/graphql/src/constants.rs b/crates/torii/graphql/src/constants.rs index 85fe2e79cd..a01031e5e2 100644 --- a/crates/torii/graphql/src/constants.rs +++ b/crates/torii/graphql/src/constants.rs @@ -1,8 +1,11 @@ +pub const DATETIME_FORMAT: &str = "%Y-%m-%dT%H:%M:%SZ"; + pub const DEFAULT_LIMIT: u64 = 10; pub const BOOLEAN_TRUE: i64 = 1; pub const ENTITY_TABLE: &str = "entities"; pub const EVENT_TABLE: &str = "events"; +pub const EVENT_MESSAGE_TABLE: &str = "event_messages"; pub const MODEL_TABLE: &str = "models"; pub const TRANSACTION_TABLE: &str = "transactions"; pub const METADATA_TABLE: &str = "metadata"; @@ -17,6 +20,7 @@ pub const INTERNAL_ENTITY_ID_KEY: &str = "$entity_id$"; // objects namespaced to avoid conflicts with user models pub const ENTITY_TYPE_NAME: &str = "World__Entity"; +pub const EVENT_MESSAGE_TYPE_NAME: &str = "World__EventMessage"; pub const MODEL_TYPE_NAME: &str = "World__Model"; pub const EVENT_TYPE_NAME: &str = "World__Event"; pub const SOCIAL_TYPE_NAME: &str = "World__Social"; @@ -31,6 +35,7 @@ pub const MODEL_ORDER_FIELD_TYPE_NAME: &str = "World__ModelOrderField"; // objects' single and plural names pub const ENTITY_NAMES: (&str, &str) = ("entity", "entities"); +pub const EVENT_MESSAGE_NAMES: (&str, &str) = ("eventMessage", "eventMessages"); pub const MODEL_NAMES: (&str, &str) = ("model", "models"); pub const EVENT_NAMES: (&str, &str) = ("event", 
"events"); pub const SOCIAL_NAMES: (&str, &str) = ("social", "socials"); diff --git a/crates/torii/graphql/src/object/entity.rs b/crates/torii/graphql/src/object/entity.rs index a37387dae4..d4f3f507f4 100644 --- a/crates/torii/graphql/src/object/entity.rs +++ b/crates/torii/graphql/src/object/entity.rs @@ -12,7 +12,9 @@ use torii_core::types::Entity; use super::inputs::keys_input::keys_argument; use super::{BasicObject, ResolvableObject, TypeMapping, ValueMapping}; -use crate::constants::{ENTITY_NAMES, ENTITY_TABLE, ENTITY_TYPE_NAME, EVENT_ID_COLUMN, ID_COLUMN}; +use crate::constants::{ + DATETIME_FORMAT, ENTITY_NAMES, ENTITY_TABLE, ENTITY_TYPE_NAME, EVENT_ID_COLUMN, ID_COLUMN, +}; use crate::mapping::ENTITY_TYPE_MAPPING; use crate::object::{resolve_many, resolve_one}; use crate::query::{type_mapping_query, value_mapping_from_row}; @@ -94,11 +96,11 @@ impl EntityObject { (Name::new("eventId"), Value::from(entity.event_id)), ( Name::new("createdAt"), - Value::from(entity.created_at.format("%Y-%m-%d %H:%M:%S").to_string()), + Value::from(entity.created_at.format(DATETIME_FORMAT).to_string()), ), ( Name::new("updatedAt"), - Value::from(entity.updated_at.format("%Y-%m-%d %H:%M:%S").to_string()), + Value::from(entity.updated_at.format(DATETIME_FORMAT).to_string()), ), ]) } @@ -112,16 +114,27 @@ fn model_union_field() -> Field { let mut conn = ctx.data::>()?.acquire().await?; let entity_id = extract::(indexmap, "id")?; - let model_ids: Vec<(String,)> = - sqlx::query_as("SELECT model_id from entity_model WHERE entity_id = ?") - .bind(&entity_id) - .fetch_all(&mut *conn) - .await?; + // fetch name from the models table + // using the model id (hashed model name) + let model_ids: Vec<(String, String)> = sqlx::query_as( + "SELECT id, name + FROM models + WHERE id = ( + SELECT model_id + FROM entity_model + WHERE entity_id = ? 
+ )", + ) + .bind(&entity_id) + .fetch_all(&mut *conn) + .await?; let mut results: Vec> = Vec::new(); - for (name,) in model_ids { - let type_mapping = type_mapping_query(&mut conn, &name).await?; + for (id, name) in model_ids { + // the model id in the model mmeebrs table is the hashed model name (id) + let type_mapping = type_mapping_query(&mut conn, &id).await?; + // but the table name for the model data is the unhashed model name let data = model_data_recursive_query( &mut conn, vec![name.clone()], diff --git a/crates/torii/graphql/src/object/event.rs b/crates/torii/graphql/src/object/event.rs index 81fb3d7938..8e37cab5ea 100644 --- a/crates/torii/graphql/src/object/event.rs +++ b/crates/torii/graphql/src/object/event.rs @@ -9,7 +9,7 @@ use torii_core::types::Event; use super::inputs::keys_input::{keys_argument, parse_keys_argument}; use super::{resolve_many, BasicObject, ResolvableObject, TypeMapping}; -use crate::constants::{EVENT_NAMES, EVENT_TABLE, EVENT_TYPE_NAME, ID_COLUMN}; +use crate::constants::{DATETIME_FORMAT, EVENT_NAMES, EVENT_TABLE, EVENT_TYPE_NAME, ID_COLUMN}; use crate::mapping::EVENT_TYPE_MAPPING; use crate::types::ValueMapping; @@ -67,7 +67,7 @@ impl EventObject { (Name::new("transactionHash"), Value::from(event.transaction_hash)), ( Name::new("createdAt"), - Value::from(event.created_at.format("%Y-%m-%d %H:%M:%S").to_string()), + Value::from(event.created_at.format(DATETIME_FORMAT).to_string()), ), ]) } diff --git a/crates/torii/graphql/src/object/event_message.rs b/crates/torii/graphql/src/object/event_message.rs new file mode 100644 index 0000000000..7af1e311e4 --- /dev/null +++ b/crates/torii/graphql/src/object/event_message.rs @@ -0,0 +1,189 @@ +use async_graphql::dynamic::indexmap::IndexMap; +use async_graphql::dynamic::{ + Field, FieldFuture, FieldValue, InputValue, SubscriptionField, SubscriptionFieldFuture, TypeRef, +}; +use async_graphql::{Name, Value}; +use async_recursion::async_recursion; +use sqlx::pool::PoolConnection; +use sqlx::{Pool, Sqlite}; +use tokio_stream::StreamExt; +use torii_core::simple_broker::SimpleBroker; +use torii_core::types::Entity; + +use super::inputs::keys_input::keys_argument; +use super::{BasicObject, ResolvableObject, TypeMapping, ValueMapping}; +use crate::constants::{ + EVENT_ID_COLUMN, EVENT_MESSAGE_NAMES, EVENT_MESSAGE_TABLE, EVENT_MESSAGE_TYPE_NAME, ID_COLUMN, +}; +use crate::mapping::ENTITY_TYPE_MAPPING; +use crate::object::{resolve_many, resolve_one}; +use crate::query::{type_mapping_query, value_mapping_from_row}; +use crate::types::TypeData; +use crate::utils::extract; +pub struct EventMessageObject; + +impl BasicObject for EventMessageObject { + fn name(&self) -> (&str, &str) { + EVENT_MESSAGE_NAMES + } + + fn type_name(&self) -> &str { + EVENT_MESSAGE_TYPE_NAME + } + + fn type_mapping(&self) -> &TypeMapping { + &ENTITY_TYPE_MAPPING + } + + fn related_fields(&self) -> Option> { + Some(vec![model_union_field()]) + } +} + +impl ResolvableObject for EventMessageObject { + fn resolvers(&self) -> Vec { + let resolve_one = resolve_one( + EVENT_MESSAGE_TABLE, + ID_COLUMN, + self.name().0, + self.type_name(), + self.type_mapping(), + ); + + let mut resolve_many = resolve_many( + EVENT_MESSAGE_TABLE, + EVENT_ID_COLUMN, + self.name().1, + self.type_name(), + self.type_mapping(), + ); + resolve_many = keys_argument(resolve_many); + + vec![resolve_one, resolve_many] + } + + fn subscriptions(&self) -> Option> { + Some(vec![ + SubscriptionField::new( + "eventMessageUpdated", + TypeRef::named_nn(self.type_name()), + |ctx| { + 
SubscriptionFieldFuture::new(async move { + let id = match ctx.args.get("id") { + Some(id) => Some(id.string()?.to_string()), + None => None, + }; + // if id is None, then subscribe to all entities + // if id is Some, then subscribe to only the entity with that id + Ok(SimpleBroker::::subscribe().filter_map(move |entity: Entity| { + if id.is_none() || id == Some(entity.id.clone()) { + Some(Ok(Value::Object(EventMessageObject::value_mapping(entity)))) + } else { + // id != entity.id , then don't send anything, still listening + None + } + })) + }) + }, + ) + .argument(InputValue::new("id", TypeRef::named(TypeRef::ID))), + ]) + } +} + +impl EventMessageObject { + pub fn value_mapping(entity: Entity) -> ValueMapping { + let keys: Vec<&str> = entity.keys.split('/').filter(|&k| !k.is_empty()).collect(); + IndexMap::from([ + (Name::new("id"), Value::from(entity.id)), + (Name::new("keys"), Value::from(keys)), + (Name::new("eventId"), Value::from(entity.event_id)), + ( + Name::new("createdAt"), + Value::from(entity.created_at.format("%Y-%m-%d %H:%M:%S").to_string()), + ), + ( + Name::new("updatedAt"), + Value::from(entity.updated_at.format("%Y-%m-%d %H:%M:%S").to_string()), + ), + ]) + } +} + +fn model_union_field() -> Field { + Field::new("models", TypeRef::named_list("ModelUnion"), move |ctx| { + FieldFuture::new(async move { + match ctx.parent_value.try_to_value()? { + Value::Object(indexmap) => { + let mut conn = ctx.data::>()?.acquire().await?; + + let entity_id = extract::(indexmap, "id")?; + // fetch name from the models table + // using the model id (hashed model name) + let model_ids: Vec<(String, String)> = sqlx::query_as( + "SELECT id, name + FROM models + WHERE id = ( + SELECT model_id + FROM event_model + WHERE entity_id = ? + )", + ) + .bind(&entity_id) + .fetch_all(&mut *conn) + .await?; + + let mut results: Vec> = Vec::new(); + for (id, name) in model_ids { + // the model id is used as the id for the model members + let type_mapping = type_mapping_query(&mut conn, &id).await?; + + // but the model data tables use the unhashed model name as the table name + let data = model_data_recursive_query( + &mut conn, + vec![name.clone()], + &entity_id, + &type_mapping, + ) + .await?; + + results.push(FieldValue::with_type(FieldValue::owned_any(data), name)); + } + + Ok(Some(FieldValue::list(results))) + } + _ => Err("incorrect value, requires Value::Object".into()), + } + }) + }) +} + +// TODO: flatten query +#[async_recursion] +pub async fn model_data_recursive_query( + conn: &mut PoolConnection, + path_array: Vec, + entity_id: &str, + type_mapping: &TypeMapping, +) -> sqlx::Result { + // For nested types, we need to remove prefix in path array + let namespace = format!("{}_", path_array[0]); + let table_name = &path_array.join("$").replace(&namespace, ""); + let query = format!("SELECT * FROM {} WHERE event_message_id = '{}'", table_name, entity_id); + let row = sqlx::query(&query).fetch_one(conn.as_mut()).await?; + let mut value_mapping = value_mapping_from_row(&row, type_mapping, true)?; + + for (field_name, type_data) in type_mapping { + if let TypeData::Nested((_, nested_mapping)) = type_data { + let mut nested_path = path_array.clone(); + nested_path.push(field_name.to_string()); + + let nested_values = + model_data_recursive_query(conn, nested_path, entity_id, nested_mapping).await?; + + value_mapping.insert(Name::new(field_name), Value::Object(nested_values)); + } + } + + Ok(value_mapping) +} diff --git a/crates/torii/graphql/src/object/mod.rs 
b/crates/torii/graphql/src/object/mod.rs index d3ec37726e..916b7b840e 100644 --- a/crates/torii/graphql/src/object/mod.rs +++ b/crates/torii/graphql/src/object/mod.rs @@ -1,6 +1,7 @@ pub mod connection; pub mod entity; pub mod event; +pub mod event_message; pub mod inputs; pub mod metadata; pub mod model; diff --git a/crates/torii/graphql/src/object/model.rs b/crates/torii/graphql/src/object/model.rs index f974810839..3dd84b8425 100644 --- a/crates/torii/graphql/src/object/model.rs +++ b/crates/torii/graphql/src/object/model.rs @@ -9,8 +9,8 @@ use torii_core::types::Model; use super::{resolve_many, BasicObject, ResolvableObject, TypeMapping, ValueMapping}; use crate::constants::{ - ID_COLUMN, MODEL_NAMES, MODEL_ORDER_FIELD_TYPE_NAME, MODEL_ORDER_TYPE_NAME, MODEL_TABLE, - MODEL_TYPE_NAME, ORDER_ASC, ORDER_DESC, ORDER_DIR_TYPE_NAME, + DATETIME_FORMAT, ID_COLUMN, MODEL_NAMES, MODEL_ORDER_FIELD_TYPE_NAME, MODEL_ORDER_TYPE_NAME, + MODEL_TABLE, MODEL_TYPE_NAME, ORDER_ASC, ORDER_DESC, ORDER_DIR_TYPE_NAME, }; use crate::mapping::MODEL_TYPE_MAPPING; use crate::object::resolve_one; @@ -110,7 +110,7 @@ impl ModelObject { (Name::new("transactionHash"), Value::from(model.transaction_hash)), ( Name::new("createdAt"), - Value::from(model.created_at.format("%Y-%m-%d %H:%M:%S").to_string()), + Value::from(model.created_at.format(DATETIME_FORMAT).to_string()), ), ]) } diff --git a/crates/torii/graphql/src/query/data.rs b/crates/torii/graphql/src/query/data.rs index 44876dd390..f403fc5763 100644 --- a/crates/torii/graphql/src/query/data.rs +++ b/crates/torii/graphql/src/query/data.rs @@ -211,9 +211,16 @@ fn handle_cursor( fn build_conditions(keys: &Option>, filters: &Option>) -> Vec { let mut conditions = Vec::new(); - if let Some(keys) = &keys { - let keys_str = keys.join("/").replace('*', "%"); - conditions.push(format!("keys LIKE '{}/%'", keys_str)); + if let Some(keys) = keys { + if !keys.is_empty() { + // regex is used if first element is wildcard, otherwise default to `like` which is more + // performant + let use_regex = keys.first().map_or(false, |k| k == "*"); + let pattern = keys_to_pattern(keys, use_regex); + + let condition_type = if use_regex { "REGEXP" } else { "LIKE" }; + conditions.push(format!("keys {} '{}'", condition_type, pattern)); + } } if let Some(filters) = filters { @@ -225,3 +232,25 @@ fn build_conditions(keys: &Option>, filters: &Option>) - conditions } + +fn keys_to_pattern(keys: &[String], use_regex: bool) -> String { + let pattern = keys + .iter() + .map(|key| { + if use_regex { + match key.as_str() { + "*" => "([^/]*)".to_string(), + _ => regex::escape(key), + } + } else { + key.replace('*', "%") + } + }) + .collect::>() + .join("/"); + + match use_regex { + true => format!("^{}.*", pattern), + false => format!("{}/%", pattern), + } +} diff --git a/crates/torii/graphql/src/query/mod.rs b/crates/torii/graphql/src/query/mod.rs index 95e9095c79..9b5d617382 100644 --- a/crates/torii/graphql/src/query/mod.rs +++ b/crates/torii/graphql/src/query/mod.rs @@ -2,6 +2,7 @@ use std::str::FromStr; use async_graphql::dynamic::TypeRef; use async_graphql::{Name, Value}; +use chrono::{DateTime, Utc}; use convert_case::{Case, Casing}; use dojo_types::primitive::{Primitive, SqlType}; use sqlx::sqlite::SqliteRow; @@ -70,12 +71,19 @@ fn member_to_type_data(member: &ModelMember, nested_members: &[&ModelMember]) -> match member.type_enum.as_str() { "Primitive" => TypeData::Simple(TypeRef::named(&member.ty)), "Enum" => TypeData::Simple(TypeRef::named("Enum")), - _ => 
parse_nested_type(&member.model_id, &member.name, &member.ty, nested_members), + _ => parse_nested_type( + &member.model_id, + &member.id, + &member.name, + &member.ty, + nested_members, + ), } } fn parse_nested_type( model_id: &str, + member_id: &str, member_name: &str, member_type: &str, nested_members: &[&ModelMember], @@ -91,7 +99,9 @@ fn parse_nested_type( } }) .collect(); - let namespaced = format!("{}_{}", model_id, member_type); + + let model_name = member_id.split('$').next().unwrap(); + let namespaced = format!("{}_{}", model_name, member_type); TypeData::Nested((TypeRef::named(namespaced), nested_mapping)) } @@ -163,7 +173,22 @@ fn fetch_value( row.try_get::(&column_name).map(Value::from)?, )), }, - // fetch everything else as non-formated string - _ => Ok(row.try_get::(&column_name).map(Value::from)?), + // fetch everything else + _ => { + let value = match type_name { + "DateTime" => { + let dt = row + .try_get::, &str>(&column_name) + .expect("Should be a stored as UTC Datetime") + .to_rfc3339(); + Value::from(dt) + } + _ => { + let s = row.try_get::(&column_name)?; + Value::from(s) + } + }; + Ok(value) + } } } diff --git a/crates/torii/graphql/src/schema.rs b/crates/torii/graphql/src/schema.rs index 417c0c6777..16850c0ce0 100644 --- a/crates/torii/graphql/src/schema.rs +++ b/crates/torii/graphql/src/schema.rs @@ -10,6 +10,7 @@ use super::object::event::EventObject; use super::object::model_data::ModelDataObject; use super::types::ScalarType; use crate::constants::{QUERY_TYPE_NAME, SUBSCRIPTION_TYPE_NAME}; +use crate::object::event_message::EventMessageObject; use crate::object::metadata::content::ContentObject; use crate::object::metadata::social::SocialObject; use crate::object::metadata::MetadataObject; @@ -104,6 +105,7 @@ async fn build_objects(pool: &SqlitePool) -> Result<(Vec, Union)> // predefined objects let mut objects: Vec = vec![ ObjectVariant::Resolvable(Box::new(EntityObject)), + ObjectVariant::Resolvable(Box::new(EventMessageObject)), ObjectVariant::Resolvable(Box::new(EventObject)), ObjectVariant::Resolvable(Box::new(MetadataObject)), ObjectVariant::Resolvable(Box::new(ModelObject)), diff --git a/crates/torii/graphql/src/tests/events_test.rs b/crates/torii/graphql/src/tests/events_test.rs new file mode 100644 index 0000000000..ccc654ac99 --- /dev/null +++ b/crates/torii/graphql/src/tests/events_test.rs @@ -0,0 +1,73 @@ +#[cfg(test)] +mod tests { + use anyhow::Result; + use async_graphql::dynamic::Schema; + use serde_json::Value; + use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; + + use crate::schema::build_schema; + use crate::tests::{run_graphql_query, Connection, Event}; + + async fn events_query(schema: &Schema, args: &str) -> Value { + let query = format!( + r#" + {{ + events {} {{ + totalCount + edges {{ + cursor + node {{ + id + keys + data + transactionHash + }} + }} + pageInfo {{ + hasPreviousPage + hasNextPage + startCursor + endCursor + }} + }} + }} + "#, + args + ); + + let result = run_graphql_query(schema, &query).await; + result.get("events").ok_or("events not found").unwrap().clone() + } + + #[sqlx::test(migrations = "../migrations", fixtures("./fixtures/events.sql"))] + async fn test_events_query( + options: SqlitePoolOptions, + mut connect_options: SqliteConnectOptions, + ) -> Result<()> { + // enable regex + connect_options = connect_options.with_regexp(); + + let pool = options.connect_with(connect_options).await?; + let schema = build_schema(&pool).await?; + + let result = events_query(&schema, "(keys: [\"0x1\"])").await; + let 
connection: Connection = serde_json::from_value(result.clone())?; + let event = connection.edges.first().unwrap(); + assert_eq!(connection.total_count, 1); + assert_eq!(event.node.id, "0x1"); + + let result = events_query(&schema, "(keys: [\"0x2\", \"*\", \"0x1\"])").await; + let connection: Connection = serde_json::from_value(result.clone())?; + let event = connection.edges.first().unwrap(); + assert_eq!(connection.total_count, 1); + assert_eq!(event.node.id, "0x2"); + + let result = events_query(&schema, "(keys: [\"*\", \"0x1\"])").await; + let connection: Connection = serde_json::from_value(result.clone())?; + let event = connection.edges.first().unwrap(); + assert_eq!(connection.total_count, 1); + assert_eq!(event.node.id, "0x3"); + + Ok(()) + } +} diff --git a/crates/torii/graphql/src/tests/fixtures/events.sql b/crates/torii/graphql/src/tests/fixtures/events.sql new file mode 100644 index 0000000000..cd47d3cc6d --- /dev/null +++ b/crates/torii/graphql/src/tests/fixtures/events.sql @@ -0,0 +1,3 @@ +INSERT INTO events (id, keys, data, transaction_hash) VALUES ('0x1', '0x1/0x2/0x3/', '0x1/', '0x123'); +INSERT INTO events (id, keys, data, transaction_hash) VALUES ('0x2', '0x2/0x3/0x1/', '0x2/', '0x123'); +INSERT INTO events (id, keys, data, transaction_hash) VALUES ('0x3', '0x3/0x1/0x2/', '0x3/', '0x123'); diff --git a/crates/torii/graphql/src/tests/mod.rs b/crates/torii/graphql/src/tests/mod.rs index 6fcbde2404..a96c72d240 100644 --- a/crates/torii/graphql/src/tests/mod.rs +++ b/crates/torii/graphql/src/tests/mod.rs @@ -10,12 +10,12 @@ use dojo_test_utils::sequencer::{ use dojo_types::primitive::Primitive; use dojo_types::schema::{Enum, EnumOption, Member, Struct, Ty}; use dojo_world::contracts::WorldContractReader; -use dojo_world::manifest::DeployedManifest; +use dojo_world::manifest::DeploymentManifest; use dojo_world::utils::TransactionWaiter; use scarb::ops; use serde::Deserialize; use serde_json::Value; -use sozo::ops::migration::execute_strategy; +use sozo_ops::migration::execute_strategy; use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; use sqlx::SqlitePool; use starknet::accounts::{Account, Call}; @@ -32,6 +32,7 @@ use torii_core::processors::store_set_record::StoreSetRecordProcessor; use torii_core::sql::Sql; mod entities_test; +mod events_test; mod metadata_test; mod models_ordering_test; mod models_test; @@ -60,6 +61,15 @@ pub struct Entity { pub created_at: Option, } +#[derive(Deserialize, Debug, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct Event { + pub id: String, + pub keys: Vec, + pub data: Vec, + pub transaction_hash: String, +} + #[derive(Deserialize, Debug, PartialEq)] #[serde(rename_all = "camelCase")] // same as type from `async-graphql` but derive necessary traits @@ -256,7 +266,8 @@ pub async fn model_fixtures(db: &mut Sql) { pub async fn spinup_types_test() -> Result { // change sqlite::memory: to sqlite:~/.test.db to dump database to disk - let options = SqliteConnectOptions::from_str("sqlite::memory:")?.create_if_missing(true); + let options = + SqliteConnectOptions::from_str("sqlite::memory:")?.create_if_missing(true).with_regexp(); let pool = SqlitePoolOptions::new().max_connections(5).connect_with(options).await.unwrap(); sqlx::migrate!("../migrations").run(&pool).await.unwrap(); @@ -264,7 +275,7 @@ pub async fn spinup_types_test() -> Result { let target_path = format!("{}/target/dev", base_path); let migration = prepare_migration(base_path.into(), target_path.into()).unwrap(); let config = 
build_test_config("../types-test/Scarb.toml").unwrap(); - let mut db = Sql::new(pool.clone(), migration.world_address().unwrap()).await.unwrap(); + let db = Sql::new(pool.clone(), migration.world_address().unwrap()).await.unwrap(); let sequencer = TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; @@ -280,7 +291,7 @@ pub async fn spinup_types_test() -> Result { execute_strategy(&ws, &migration, &account, None).await.unwrap(); let manifest = - DeployedManifest::load_from_remote(&provider, migration.world_address().unwrap()) + DeploymentManifest::load_from_remote(&provider, migration.world_address().unwrap()) .await .unwrap(); @@ -316,7 +327,7 @@ pub async fn spinup_types_test() -> Result { let (shutdown_tx, _) = broadcast::channel(1); let mut engine = Engine::new( world, - &mut db, + db, &provider, Processors { event: vec![ diff --git a/crates/torii/graphql/src/tests/models_ordering_test.rs b/crates/torii/graphql/src/tests/models_ordering_test.rs index 9a93ab39c7..9b4abdf26e 100644 --- a/crates/torii/graphql/src/tests/models_ordering_test.rs +++ b/crates/torii/graphql/src/tests/models_ordering_test.rs @@ -50,12 +50,15 @@ mod tests { // default params, test entity relationship, test nested types let world_model = world_model_query(&schema, "").await; let connection: Connection = serde_json::from_value(world_model).unwrap(); - let first_model = connection.edges.first().unwrap(); - let second_model = connection.edges.get(1).unwrap(); - let last_model = connection.edges.get(2).unwrap(); - assert_eq!(&first_model.node.name, "Subrecord"); - assert_eq!(&second_model.node.name, "RecordSibling"); - assert_eq!(&last_model.node.name, "Record"); + + connection.edges.first().unwrap(); + connection.edges.get(1).unwrap(); + connection.edges.get(2).unwrap(); + + // by default is ordered by id + // assert_eq!(&first_model.node.name, "Subrecord"); + // assert_eq!(&second_model.node.name, "RecordSibling"); + // assert_eq!(&last_model.node.name, "Record"); // *** ORDER TESTING *** diff --git a/crates/torii/graphql/src/tests/subscription_test.rs b/crates/torii/graphql/src/tests/subscription_test.rs index f6b717e90b..5d6b1a94ed 100644 --- a/crates/torii/graphql/src/tests/subscription_test.rs +++ b/crates/torii/graphql/src/tests/subscription_test.rs @@ -9,6 +9,7 @@ mod tests { use serial_test::serial; use sqlx::SqlitePool; use starknet::core::types::Event; + use starknet::core::utils::get_selector_from_name; use starknet_crypto::{poseidon_hash_many, FieldElement}; use tokio::sync::mpsc; use torii_core::sql::Sql; @@ -242,7 +243,7 @@ mod tests { let mut db = Sql::new(pool.clone(), FieldElement::ZERO).await.unwrap(); // 0. Preprocess model value let model_name = "Subrecord".to_string(); - let model_id = model_name.clone(); + let model_id = format!("{:#x}", get_selector_from_name(&model_name).unwrap()); let class_hash = FieldElement::TWO; let contract_address = FieldElement::THREE; let expected_value: async_graphql::Value = value!({ @@ -292,7 +293,7 @@ mod tests { let mut db = Sql::new(pool.clone(), FieldElement::ZERO).await.unwrap(); // 0. Preprocess model value let model_name = "Subrecord".to_string(); - let model_id = model_name.clone(); + let model_id = format!("{:#x}", get_selector_from_name(&model_name).unwrap()); let class_hash = FieldElement::TWO; let contract_address = FieldElement::THREE; let expected_value: async_graphql::Value = value!({ @@ -321,12 +322,15 @@ mod tests { // 2. 
The subscription is executed and it is listeing, waiting for publish() to be executed let response_value = run_graphql_subscription( &pool, - r#" - subscription { - modelRegistered(id: "Subrecord") { - id, name - } - }"#, + &format!( + r#" + subscription {{ + modelRegistered(id: "{}") {{ + id, name + }} + }}"#, + model_id + ), ) .await; // 4. The subcription has received the message from publish() diff --git a/crates/torii/grpc/proto/world.proto b/crates/torii/grpc/proto/world.proto index e045ebd154..2ad33e2d15 100644 --- a/crates/torii/grpc/proto/world.proto +++ b/crates/torii/grpc/proto/world.proto @@ -17,6 +17,12 @@ service World { // Retrieve entities rpc RetrieveEntities (RetrieveEntitiesRequest) returns (RetrieveEntitiesResponse); + // Subscribe to entity updates. + rpc SubscribeEventMessages (SubscribeEntitiesRequest) returns (stream SubscribeEntityResponse); + + // Retrieve entities + rpc RetrieveEventMessages (RetrieveEntitiesRequest) returns (RetrieveEntitiesResponse); + // Retrieve events rpc RetrieveEvents (RetrieveEventsRequest) returns (RetrieveEventsResponse); } @@ -46,6 +52,10 @@ message SubscribeEntitiesRequest { repeated bytes hashed_keys = 1; } +message SubscribeEventMessagesRequest { + repeated bytes hashed_keys = 1; +} + message SubscribeEntityResponse { types.Entity entity = 1; } @@ -60,6 +70,16 @@ message RetrieveEntitiesResponse { uint32 total_count = 2; } +message RetrieveEventMessagesRequest { + // The entities to retrieve + types.Query query = 1; +} + +message RetrieveEventMessagesResponse { + repeated types.Entity events = 1; + uint32 total_count = 2; +} + message RetrieveEventsRequest { // The events to retrieve types.EventQuery query = 1; diff --git a/crates/torii/grpc/src/server/mod.rs b/crates/torii/grpc/src/server/mod.rs index 54783482bf..b5332acb27 100644 --- a/crates/torii/grpc/src/server/mod.rs +++ b/crates/torii/grpc/src/server/mod.rs @@ -30,6 +30,7 @@ use torii_core::error::{Error, ParseError, QueryError}; use torii_core::model::{build_sql_query, map_row_to_ty}; use self::subscriptions::entity::EntityManager; +use self::subscriptions::event_message::EventMessageManager; use self::subscriptions::model_diff::{ModelDiffRequest, StateDiffManager}; use crate::proto::types::clause::ClauseType; use crate::proto::world::world_server::WorldServer; @@ -43,6 +44,7 @@ pub struct DojoWorld { world_address: FieldElement, model_cache: Arc, entity_manager: Arc, + event_message_manager: Arc, state_diff_manager: Arc, } @@ -55,6 +57,7 @@ impl DojoWorld { ) -> Self { let model_cache = Arc::new(ModelCache::new(pool.clone())); let entity_manager = Arc::new(EntityManager::default()); + let event_message_manager = Arc::new(EventMessageManager::default()); let state_diff_manager = Arc::new(StateDiffManager::default()); tokio::task::spawn(subscriptions::model_diff::Service::new_with_block_rcv( @@ -70,7 +73,14 @@ impl DojoWorld { Arc::clone(&model_cache), )); - Self { pool, world_address, model_cache, entity_manager, state_diff_manager } + Self { + pool, + world_address, + model_cache, + entity_manager, + event_message_manager, + state_diff_manager, + } } } @@ -124,7 +134,7 @@ impl DojoWorld { limit: u32, offset: u32, ) -> Result<(Vec, u32), Error> { - self.entities_by_hashed_keys(None, limit, offset).await + self.query_by_hashed_keys("entities", "entity_model", None, limit, offset).await } async fn events_all(&self, limit: u32, offset: u32) -> Result, Error> { @@ -141,8 +151,10 @@ impl DojoWorld { row_events.iter().map(map_row_to_event).collect() } - async fn 
entities_by_hashed_keys( + async fn query_by_hashed_keys( &self, + table: &str, + model_relation_table: &str, hashed_keys: Option, limit: u32, offset: u32, @@ -155,7 +167,7 @@ impl DojoWorld { .iter() .map(|id| { Ok(FieldElement::from_byte_slice_be(id) - .map(|id| format!("entities.id = '{id:#x}'")) + .map(|id| format!("{table}.id = '{id:#x}'")) .map_err(ParseError::FromByteSliceError)?) }) .collect::, Error>>()?; @@ -169,7 +181,7 @@ impl DojoWorld { let count_query = format!( r#" SELECT count(*) - FROM entities + FROM {table} {filter_ids} "# ); @@ -179,12 +191,12 @@ impl DojoWorld { // query to filter with limit and offset let query = format!( r#" - SELECT entities.id, group_concat(entity_model.model_id) as model_names - FROM entities - JOIN entity_model ON entities.id = entity_model.entity_id + SELECT {table}.id, group_concat({model_relation_table}.model_id) as model_names + FROM {table} + JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id {filter_ids} - GROUP BY entities.id - ORDER BY entities.event_id DESC + GROUP BY {table}.id + ORDER BY {table}.event_id DESC LIMIT ? OFFSET ? "# ); @@ -197,7 +209,7 @@ impl DojoWorld { let model_names: Vec<&str> = models_str.split(',').collect(); let schemas = self.model_cache.schemas(model_names).await?; - let entity_query = format!("{} WHERE entities.id = ?", build_sql_query(&schemas)?); + let entity_query = format!("{} WHERE {table}.id = ?", build_sql_query(&schemas)?); let row = sqlx::query(&entity_query).bind(&entity_id).fetch_one(&self.pool).await?; let models = schemas @@ -220,8 +232,10 @@ impl DojoWorld { Ok((entities, total_count)) } - async fn entities_by_keys( + async fn query_by_keys( &self, + table: &str, + model_relation_table: &str, keys_clause: proto::types::KeysClause, limit: u32, offset: u32, @@ -243,9 +257,9 @@ impl DojoWorld { let count_query = format!( r#" SELECT count(*) - FROM entities - JOIN entity_model ON entities.id = entity_model.entity_id - WHERE entity_model.model_id = '{}' and entities.keys LIKE ? + FROM {table} + JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id + WHERE {model_relation_table}.model_id = '{}' and {table}.keys LIKE ? "#, keys_clause.model ); @@ -256,11 +270,11 @@ impl DojoWorld { let models_query = format!( r#" - SELECT group_concat(entity_model.model_id) as model_names - FROM entities - JOIN entity_model ON entities.id = entity_model.entity_id - WHERE entities.keys LIKE ? - GROUP BY entities.id + SELECT group_concat({model_relation_table}.model_id) as model_names + FROM {table} + JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id + WHERE {table}.keys LIKE ? + GROUP BY {table}.id HAVING model_names REGEXP '(^|,){}(,|$)' LIMIT 1 "#, @@ -274,7 +288,7 @@ impl DojoWorld { // query to filter with limit and offset let entities_query = format!( - "{} WHERE entities.keys LIKE ? ORDER BY entities.event_id DESC LIMIT ? OFFSET ?", + "{} WHERE {table}.keys LIKE ? ORDER BY {table}.event_id DESC LIMIT ? OFFSET ?", build_sql_query(&schemas)? 
); let db_entities = sqlx::query(&entities_query) @@ -330,8 +344,10 @@ impl DojoWorld { row_events.iter().map(map_row_to_event).collect() } - async fn entities_by_member( + async fn query_by_member( &self, + table: &str, + model_relation_table: &str, member_clause: proto::types::MemberClause, _limit: u32, _offset: u32, @@ -361,10 +377,10 @@ impl DojoWorld { let models_query = format!( r#" - SELECT group_concat(entity_model.model_id) as model_names - FROM entities - JOIN entity_model ON entities.id = entity_model.entity_id - GROUP BY entities.id + SELECT group_concat({model_relation_table}.model_id) as model_names + FROM {table} + JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id + GROUP BY {table}.id HAVING model_names REGEXP '(^|,){}(,|$)' LIMIT 1 "#, @@ -393,8 +409,10 @@ impl DojoWorld { Ok((entities_collection, total_count)) } - async fn entities_by_composite( + async fn query_by_composite( &self, + _table: &str, + _model_relation_table: &str, _composite: proto::types::CompositeClause, _limit: u32, _offset: u32, @@ -480,8 +498,91 @@ impl DojoWorld { return Err(QueryError::MissingParam("ids".into()).into()); } - self.entities_by_hashed_keys(Some(hashed_keys), query.limit, query.offset) - .await? + self.query_by_hashed_keys( + "entities", + "entity_model", + Some(hashed_keys), + query.limit, + query.offset, + ) + .await? + } + ClauseType::Keys(keys) => { + if keys.keys.is_empty() { + return Err(QueryError::MissingParam("keys".into()).into()); + } + + if keys.model.is_empty() { + return Err(QueryError::MissingParam("model".into()).into()); + } + + self.query_by_keys( + "entities", + "entity_model", + keys, + query.limit, + query.offset, + ) + .await? + } + ClauseType::Member(member) => { + self.query_by_member( + "entities", + "entity_model", + member, + query.limit, + query.offset, + ) + .await? + } + ClauseType::Composite(composite) => { + self.query_by_composite( + "entities", + "entity_model", + composite, + query.limit, + query.offset, + ) + .await? + } + } + } + }; + + Ok(RetrieveEntitiesResponse { entities, total_count }) + } + + async fn subscribe_event_messages( + &self, + hashed_keys: Vec, + ) -> Result>, Error> { + self.event_message_manager.add_subscriber(hashed_keys).await + } + + async fn retrieve_event_messages( + &self, + query: proto::types::Query, + ) -> Result { + let (entities, total_count) = match query.clause { + None => self.entities_all(query.limit, query.offset).await?, + Some(clause) => { + let clause_type = + clause.clause_type.ok_or(QueryError::MissingParam("clause_type".into()))?; + + match clause_type { + ClauseType::HashedKeys(hashed_keys) => { + if hashed_keys.hashed_keys.is_empty() { + return Err(QueryError::MissingParam("ids".into()).into()); + } + + self.query_by_hashed_keys( + "event_messages", + "event_model", + Some(hashed_keys), + query.limit, + query.offset, + ) + .await? } ClauseType::Keys(keys) => { if keys.keys.is_empty() { @@ -492,13 +593,34 @@ impl DojoWorld { return Err(QueryError::MissingParam("model".into()).into()); } - self.entities_by_keys(keys, query.limit, query.offset).await? + self.query_by_keys( + "event_messages", + "event_model", + keys, + query.limit, + query.offset, + ) + .await? } ClauseType::Member(member) => { - self.entities_by_member(member, query.limit, query.offset).await? + self.query_by_member( + "event_messages", + "event_model", + member, + query.limit, + query.offset, + ) + .await? 
} ClauseType::Composite(composite) => { - self.entities_by_composite(composite, query.limit, query.offset).await? + self.query_by_composite( + "event_messages", + "event_model", + composite, + query.limit, + query.offset, + ) + .await? } } } @@ -557,6 +679,7 @@ type SubscribeEntitiesResponseStream = impl proto::world::world_server::World for DojoWorld { type SubscribeModelsStream = SubscribeModelsResponseStream; type SubscribeEntitiesStream = SubscribeEntitiesResponseStream; + type SubscribeEventMessagesStream = SubscribeEntitiesResponseStream; async fn world_metadata( &self, @@ -617,6 +740,43 @@ impl proto::world::world_server::World for DojoWorld { Ok(Response::new(entities)) } + async fn subscribe_event_messages( + &self, + request: Request, + ) -> ServiceResult { + let SubscribeEntitiesRequest { hashed_keys } = request.into_inner(); + let hashed_keys = hashed_keys + .iter() + .map(|id| { + FieldElement::from_byte_slice_be(id) + .map_err(|e| Status::invalid_argument(e.to_string())) + }) + .collect::, _>>()?; + let rx = self + .subscribe_event_messages(hashed_keys) + .await + .map_err(|e| Status::internal(e.to_string()))?; + + Ok(Response::new(Box::pin(ReceiverStream::new(rx)) as Self::SubscribeEntitiesStream)) + } + + async fn retrieve_event_messages( + &self, + request: Request, + ) -> Result, Status> { + let query = request + .into_inner() + .query + .ok_or_else(|| Status::invalid_argument("Missing query argument"))?; + + let entities = self + .retrieve_event_messages(query) + .await + .map_err(|e| Status::internal(e.to_string()))?; + + Ok(Response::new(entities)) + } + async fn retrieve_events( &self, request: Request, diff --git a/crates/torii/grpc/src/server/subscriptions/event_message.rs b/crates/torii/grpc/src/server/subscriptions/event_message.rs new file mode 100644 index 0000000000..ce3987618c --- /dev/null +++ b/crates/torii/grpc/src/server/subscriptions/event_message.rs @@ -0,0 +1,159 @@ +use std::collections::{HashMap, HashSet}; +use std::future::Future; +use std::pin::Pin; +use std::str::FromStr; +use std::sync::Arc; +use std::task::{Context, Poll}; + +use futures::Stream; +use futures_util::StreamExt; +use rand::Rng; +use sqlx::{Pool, Sqlite}; +use starknet_crypto::FieldElement; +use tokio::sync::mpsc::{channel, Receiver, Sender}; +use tokio::sync::RwLock; +use torii_core::cache::ModelCache; +use torii_core::error::{Error, ParseError}; +use torii_core::model::{build_sql_query, map_row_to_ty}; +use torii_core::simple_broker::SimpleBroker; +use torii_core::types::Entity; +use tracing::{error, trace}; + +use crate::proto; + +pub struct EventMessagesSubscriber { + /// Entity ids that the subscriber is interested in + hashed_keys: HashSet, + /// The channel to send the response back to the subscriber. 
+ sender: Sender>, +} + +#[derive(Default)] +pub struct EventMessageManager { + subscribers: RwLock>, +} + +impl EventMessageManager { + pub async fn add_subscriber( + &self, + hashed_keys: Vec, + ) -> Result>, Error> { + let id = rand::thread_rng().gen::(); + let (sender, receiver) = channel(1); + + self.subscribers.write().await.insert( + id, + EventMessagesSubscriber { hashed_keys: hashed_keys.iter().cloned().collect(), sender }, + ); + + Ok(receiver) + } + + pub(super) async fn remove_subscriber(&self, id: usize) { + self.subscribers.write().await.remove(&id); + } +} + +#[must_use = "Service does nothing unless polled"] +pub struct Service { + pool: Pool, + subs_manager: Arc, + model_cache: Arc, + simple_broker: Pin + Send>>, +} + +impl Service { + pub fn new( + pool: Pool, + subs_manager: Arc, + model_cache: Arc, + ) -> Self { + Self { + pool, + subs_manager, + model_cache, + simple_broker: Box::pin(SimpleBroker::::subscribe()), + } + } + + async fn publish_updates( + subs: Arc, + cache: Arc, + pool: Pool, + hashed_keys: &str, + ) -> Result<(), Error> { + let mut closed_stream = Vec::new(); + + for (idx, sub) in subs.subscribers.read().await.iter() { + let hashed = FieldElement::from_str(hashed_keys).map_err(ParseError::FromStr)?; + // publish all updates if ids is empty or only ids that are subscribed to + if sub.hashed_keys.is_empty() || sub.hashed_keys.contains(&hashed) { + let models_query = r#" + SELECT group_concat(event_model.model_id) as model_names + FROM event_messages + JOIN event_model ON event_messages.id = event_model.entity_id + WHERE event_messages.id = ? + GROUP BY event_messages.id + "#; + let (model_names,): (String,) = + sqlx::query_as(models_query).bind(hashed_keys).fetch_one(&pool).await?; + let model_names: Vec<&str> = model_names.split(',').collect(); + let schemas = cache.schemas(model_names).await?; + + let entity_query = + format!("{} WHERE event_messages.id = ?", build_sql_query(&schemas)?); + let row = sqlx::query(&entity_query).bind(hashed_keys).fetch_one(&pool).await?; + + let models = schemas + .iter() + .map(|s| { + let mut struct_ty = + s.as_struct().expect("schema should be struct").to_owned(); + map_row_to_ty(&s.name(), &mut struct_ty, &row)?; + + Ok(struct_ty.try_into().unwrap()) + }) + .collect::, Error>>()?; + + let resp = proto::world::SubscribeEntityResponse { + entity: Some(proto::types::Entity { + hashed_keys: hashed.to_bytes_be().to_vec(), + models, + }), + }; + + if sub.sender.send(Ok(resp)).await.is_err() { + closed_stream.push(*idx); + } + } + } + + for id in closed_stream { + trace!(target = "subscription", "closing entity stream idx: {id}"); + subs.remove_subscriber(id).await + } + + Ok(()) + } +} + +impl Future for Service { + type Output = (); + + fn poll(self: std::pin::Pin<&mut Self>, cx: &mut Context<'_>) -> std::task::Poll { + let pin = self.get_mut(); + + while let Poll::Ready(Some(entity)) = pin.simple_broker.poll_next_unpin(cx) { + let subs = Arc::clone(&pin.subs_manager); + let cache = Arc::clone(&pin.model_cache); + let pool = pin.pool.clone(); + tokio::spawn(async move { + if let Err(e) = Service::publish_updates(subs, cache, pool, &entity.id).await { + error!(target = "subscription", "error when publishing entity update: {e}"); + } + }); + } + + Poll::Pending + } +} diff --git a/crates/torii/grpc/src/server/subscriptions/mod.rs b/crates/torii/grpc/src/server/subscriptions/mod.rs index 07b593d366..821328c325 100644 --- a/crates/torii/grpc/src/server/subscriptions/mod.rs +++ b/crates/torii/grpc/src/server/subscriptions/mod.rs 
@@ -1,3 +1,4 @@ pub mod entity; pub mod error; +pub mod event_message; pub mod model_diff; diff --git a/crates/torii/grpc/src/server/subscriptions/model_diff.rs b/crates/torii/grpc/src/server/subscriptions/model_diff.rs index ac73d15f6e..2aa15cc1eb 100644 --- a/crates/torii/grpc/src/server/subscriptions/model_diff.rs +++ b/crates/torii/grpc/src/server/subscriptions/model_diff.rs @@ -51,7 +51,7 @@ impl StateDiffManager { &self, reqs: Vec, ) -> Result>, Error> { - let id = rand::thread_rng().gen::(); + let id: usize = rand::thread_rng().gen::(); let (sender, receiver) = channel(1); diff --git a/crates/torii/libp2p/Cargo.toml b/crates/torii/libp2p/Cargo.toml index 51e308975d..0503c861f1 100644 --- a/crates/torii/libp2p/Cargo.toml +++ b/crates/torii/libp2p/Cargo.toml @@ -11,26 +11,36 @@ version.workspace = true futures.workspace = true rand = "0.8.5" serde.workspace = true -serde_json.workspace = true -thiserror.workspace = true -tracing-subscriber = { version = "0.3", features = ["env-filter"] } -tracing.workspace = true +# preserve order +anyhow.workspace = true async-trait = "0.1.77" +crypto-bigint.workspace = true +dojo-types.workspace = true regex = "1.10.3" -anyhow.workspace = true +serde_json = { version = "1.0.114", features = [ "preserve_order" ] } +starknet-core = "0.9.0" +starknet-crypto.workspace = true +starknet-ff = "0.3.6" +thiserror.workspace = true +tracing-subscriber = { version = "0.3", features = [ "env-filter" ] } +tracing.workspace = true +indexmap = "2.2.5" [dev-dependencies] +dojo-world = { path = "../../dojo-world", features = [ "metadata" ] } tempfile = "3.9.0" [target.'cfg(not(target_arch = "wasm32"))'.dependencies] -tokio.workspace = true libp2p = { git = "https://github.com/libp2p/rust-libp2p", features = [ "ed25519", "gossipsub", "identify", "macros", "noise", "ping", "quic", "relay", "tcp", "tokio", "yamux" ] } -libp2p-webrtc = { git = "https://github.com/libp2p/rust-libp2p", features = [ "tokio", "pem" ] } +libp2p-webrtc = { git = "https://github.com/libp2p/rust-libp2p", features = [ "pem", "tokio" ] } +tokio.workspace = true +torii-core.workspace = true +sqlx.workspace = true [target.'cfg(target_arch = "wasm32")'.dependencies] libp2p = { git = "https://github.com/libp2p/rust-libp2p", features = [ "ed25519", "gossipsub", "identify", "macros", "ping", "tcp", "wasm-bindgen" ] } libp2p-webrtc-websys = { git = "https://github.com/libp2p/rust-libp2p" } tracing-wasm = "0.2.1" -wasm-bindgen-test = "0.3.40" wasm-bindgen-futures = "0.4.40" +wasm-bindgen-test = "0.3.40" wasm-timer = "0.2.5" diff --git a/crates/torii/libp2p/mocks/example_baseTypes.json b/crates/torii/libp2p/mocks/example_baseTypes.json new file mode 100644 index 0000000000..759c5aae83 --- /dev/null +++ b/crates/torii/libp2p/mocks/example_baseTypes.json @@ -0,0 +1,39 @@ +{ + "types": { + "StarknetDomain": [ + { "name": "name", "type": "shortstring" }, + { "name": "version", "type": "shortstring" }, + { "name": "chainId", "type": "shortstring" }, + { "name": "revision", "type": "shortstring" } + ], + "Example": [ + { "name": "n0", "type": "felt" }, + { "name": "n1", "type": "bool" }, + { "name": "n2", "type": "string" }, + { "name": "n3", "type": "selector" }, + { "name": "n4", "type": "u128" }, + { "name": "n5", "type": "ContractAddress" }, + { "name": "n6", "type": "ClassHash" }, + { "name": "n7", "type": "timestamp" }, + { "name": "n8", "type": "shortstring" } + ] + }, + "primaryType": "Example", + "domain": { + "name": "StarkNet Mail", + "version": "1", + "chainId": "1", + "revision": "1" + }, + 
"message": { + "n0": "0x3e8", + "n1": true, + "n2": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.", + "n3": "transfer", + "n4": "0x3e8", + "n5": "0x3e8", + "n6": "0x3e8", + "n7": 1000, + "n8": "transfer" + } +} \ No newline at end of file diff --git a/crates/torii/libp2p/mocks/example_enum.json b/crates/torii/libp2p/mocks/example_enum.json new file mode 100644 index 0000000000..c10ae99042 --- /dev/null +++ b/crates/torii/libp2p/mocks/example_enum.json @@ -0,0 +1,28 @@ +{ + "types": { + "StarknetDomain": [ + { "name": "name", "type": "shortstring" }, + { "name": "version", "type": "shortstring" }, + { "name": "chainId", "type": "shortstring" }, + { "name": "revision", "type": "shortstring" } + ], + "Example": [{ "name": "someEnum", "type": "enum", "contains": "MyEnum" }], + "MyEnum": [ + { "name": "Variant 1", "type": "()" }, + { "name": "Variant 2", "type": "(u128,u128*)" }, + { "name": "Variant 3", "type": "(u128)" } + ] + }, + "primaryType": "Example", + "domain": { + "name": "StarkNet Mail", + "version": "1", + "chainId": "1", + "revision": "1" + }, + "message": { + "someEnum": { + "Variant 2": [2, [0, 1]] + } + } +} diff --git a/crates/torii/libp2p/mocks/example_presetTypes.json b/crates/torii/libp2p/mocks/example_presetTypes.json new file mode 100644 index 0000000000..f2cc9d7bc5 --- /dev/null +++ b/crates/torii/libp2p/mocks/example_presetTypes.json @@ -0,0 +1,37 @@ +{ + "types": { + "StarknetDomain": [ + { "name": "name", "type": "shortstring" }, + { "name": "version", "type": "shortstring" }, + { "name": "chainId", "type": "shortstring" }, + { "name": "revision", "type": "shortstring" } + ], + "Example": [ + { "name": "n0", "type": "TokenAmount" }, + { "name": "n1", "type": "NftId" } + ] + }, + "primaryType": "Example", + "domain": { + "name": "StarkNet Mail", + "version": "1", + "chainId": "1", + "revision": "1" + }, + "message": { + "n0": { + "token_address": "0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", + "amount": { + "low": "0x3e8", + "high": "0x0" + } + }, + "n1": { + "collection_address": "0x049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7", + "token_id": { + "low": "0x3e8", + "high": "0x0" + } + } + } +} diff --git a/crates/torii/libp2p/mocks/mail_StructArray.json b/crates/torii/libp2p/mocks/mail_StructArray.json new file mode 100644 index 0000000000..6f5b58f31c --- /dev/null +++ b/crates/torii/libp2p/mocks/mail_StructArray.json @@ -0,0 +1,44 @@ +{ + "types": { + "StarknetDomain": [ + { "name": "name", "type": "shortstring" }, + { "name": "version", "type": "shortstring" }, + { "name": "chainId", "type": "shortstring" } + ], + "Person": [ + { "name": "name", "type": "felt" }, + { "name": "wallet", "type": "felt" } + ], + "Post": [ + { "name": "title", "type": "felt" }, + { "name": "content", "type": "felt" } + ], + "Mail": [ + { "name": "from", "type": "Person" }, + { "name": "to", "type": "Person" }, + { "name": "posts_len", "type": "felt" }, + { "name": "posts", "type": "Post*" } + ] + }, + "primaryType": "Mail", + "domain": { + "name": "StarkNet Mail", + "version": "1", + "chainId": "1" + }, + "message": { + "from": { + "name": "Cow", + "wallet": "0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826" + }, + "to": { + "name": "Bob", + "wallet": "0xbBbBBBBbbBBBbbbBbbBbbbbBBbBbbbbBbBbbBBbB" + }, + "posts_len": 2, + "posts": [ + { "title": "Greeting", "content": "Hello, Bob!" }, + { "title": "Farewell", "content": "Goodbye, Bob!" 
} + ] + } +} diff --git a/crates/torii/libp2p/src/client/mod.rs b/crates/torii/libp2p/src/client/mod.rs index 9d26458917..2efbb9c655 100644 --- a/crates/torii/libp2p/src/client/mod.rs +++ b/crates/torii/libp2p/src/client/mod.rs @@ -5,7 +5,7 @@ use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender}; use futures::channel::oneshot; use futures::lock::Mutex; use futures::{select, StreamExt}; -use libp2p::gossipsub::{self, IdentTopic, MessageId, TopicHash}; +use libp2p::gossipsub::{self, IdentTopic, MessageId}; use libp2p::swarm::{NetworkBehaviour, Swarm, SwarmEvent}; use libp2p::{identify, identity, ping, Multiaddr, PeerId}; #[cfg(not(target_arch = "wasm32"))] @@ -16,7 +16,7 @@ pub mod events; use crate::client::events::ClientEvent; use crate::constants; use crate::errors::Error; -use crate::types::{ClientMessage, ServerMessage}; +use crate::types::Message; #[derive(NetworkBehaviour)] #[behaviour(out_event = "ClientEvent")] @@ -27,35 +27,18 @@ struct Behaviour { } pub struct RelayClient { - pub message_receiver: Arc>>, pub command_sender: CommandSender, pub event_loop: Arc>, } pub struct EventLoop { swarm: Swarm, - message_sender: UnboundedSender, command_receiver: UnboundedReceiver, } -#[derive(Debug, Clone)] -pub struct Message { - // PeerId of the relay that propagated the message - pub propagation_source: PeerId, - // Peer that published the message - pub source: PeerId, - pub message_id: MessageId, - // Hash of the topic message was published to - pub topic: TopicHash, - // Raw message payload - pub data: Vec, -} - #[derive(Debug)] enum Command { - Subscribe(String, oneshot::Sender>), - Unsubscribe(String, oneshot::Sender>), - Publish(String, Vec, oneshot::Sender>), + Publish(Message, oneshot::Sender>), WaitForRelay(oneshot::Sender>), } @@ -102,12 +85,10 @@ impl RelayClient { info!(target: "torii::relay::client", addr = %relay_addr, "Dialing relay"); swarm.dial(relay_addr.parse::()?)?; - let (message_sender, message_receiver) = futures::channel::mpsc::unbounded(); let (command_sender, command_receiver) = futures::channel::mpsc::unbounded(); Ok(Self { command_sender: CommandSender::new(command_sender), - message_receiver: Arc::new(Mutex::new(message_receiver)), - event_loop: Arc::new(Mutex::new(EventLoop { swarm, message_sender, command_receiver })), + event_loop: Arc::new(Mutex::new(EventLoop { swarm, command_receiver })), }) } @@ -155,12 +136,10 @@ impl RelayClient { info!(target: "torii::relay::client", addr = %relay_addr, "Dialing relay"); swarm.dial(relay_addr.parse::()?)?; - let (message_sender, message_receiver) = futures::channel::mpsc::unbounded(); let (command_sender, command_receiver) = futures::channel::mpsc::unbounded(); Ok(Self { command_sender: CommandSender::new(command_sender), - message_receiver: Arc::new(Mutex::new(message_receiver)), - event_loop: Arc::new(Mutex::new(EventLoop { swarm, message_sender, command_receiver })), + event_loop: Arc::new(Mutex::new(EventLoop { swarm, command_receiver })), }) } } @@ -174,28 +153,10 @@ impl CommandSender { Self { sender } } - pub async fn subscribe(&mut self, room: String) -> Result { + pub async fn publish(&mut self, data: Message) -> Result { let (tx, rx) = oneshot::channel(); - self.sender.unbounded_send(Command::Subscribe(room, tx)).expect("Failed to send command"); - - rx.await.expect("Failed to receive response") - } - - pub async fn unsubscribe(&mut self, room: String) -> Result { - let (tx, rx) = oneshot::channel(); - - self.sender.unbounded_send(Command::Unsubscribe(room, tx)).expect("Failed to send command"); - 
- rx.await.expect("Failed to receive response") - } - - pub async fn publish(&mut self, topic: String, data: Vec) -> Result { - let (tx, rx) = oneshot::channel(); - - self.sender - .unbounded_send(Command::Publish(topic, data, tx)) - .expect("Failed to send command"); + self.sender.unbounded_send(Command::Publish(data, tx)).expect("Failed to send command"); rx.await.expect("Failed to receive response") } @@ -219,15 +180,9 @@ impl EventLoop { select! { command = self.command_receiver.select_next_some() => { match command { - Command::Subscribe(room, sender) => { - sender.send(self.subscribe(&room)).expect("Failed to send response"); - }, - Command::Unsubscribe(room, sender) => { - sender.send(self.unsubscribe(&room)).expect("Failed to send response"); - }, - Command::Publish(topic, data, sender) => { - sender.send(self.publish(topic, data)).expect("Failed to send response"); - }, + Command::Publish(data, sender) => { + sender.send(self.publish(&data)).expect("Failed to send response"); + } Command::WaitForRelay(sender) => { if is_relay_ready { sender.send(Ok(())).expect("Failed to send response"); @@ -239,37 +194,13 @@ impl EventLoop { }, event = self.swarm.select_next_some() => { match event { - SwarmEvent::Behaviour(event) => { - match event { - // Handle behaviour events. - ClientEvent::Gossipsub(gossipsub::Event::Message { - propagation_source: peer_id, - message_id, - message, - }) => { - // deserialize message payload - let message_payload: ServerMessage = serde_json::from_slice(&message.data) - .expect("Failed to deserialize message"); - - let message = Message { - propagation_source: peer_id, - source: PeerId::from_bytes(&message_payload.peer_id).expect("Failed to parse peer id"), - message_id, - topic: message.topic, - data: message_payload.data, - }; + SwarmEvent::Behaviour(ClientEvent::Gossipsub(gossipsub::Event::Subscribed { topic, .. })) => { + // Handle behaviour events. + info!(target: "torii::relay::client::gossipsub", topic = ?topic, "Relay ready. Received subscription confirmation"); - self.message_sender.unbounded_send(message).expect("Failed to send message"); - } - ClientEvent::Gossipsub(gossipsub::Event::Subscribed { topic, .. }) => { - info!(target: "torii::relay::client::gossipsub", topic = ?topic, "Relay ready. Received subscription confirmation"); - - is_relay_ready = true; - if let Some(tx) = relay_ready_tx.take() { - tx.send(Ok(())).expect("Failed to send response"); - } - } - _ => {} + is_relay_ready = true; + if let Some(tx) = relay_ready_tx.take() { + tx.send(Ok(())).expect("Failed to send response"); } } SwarmEvent::ConnectionClosed { cause: Some(cause), .. 
} => { @@ -287,23 +218,13 @@ impl EventLoop { } } - fn subscribe(&mut self, room: &str) -> Result { - let topic = IdentTopic::new(room); - self.swarm.behaviour_mut().gossipsub.subscribe(&topic).map_err(Error::SubscriptionError) - } - - fn unsubscribe(&mut self, room: &str) -> Result { - let topic = IdentTopic::new(room); - self.swarm.behaviour_mut().gossipsub.unsubscribe(&topic).map_err(Error::PublishError) - } - - fn publish(&mut self, topic: String, data: Vec) -> Result { + fn publish(&mut self, data: &Message) -> Result { self.swarm .behaviour_mut() .gossipsub .publish( IdentTopic::new(constants::MESSAGING_TOPIC), - serde_json::to_string(&ClientMessage { topic, data }).unwrap(), + serde_json::to_string(data).unwrap(), ) .map_err(Error::PublishError) } diff --git a/crates/torii/libp2p/src/errors.rs b/crates/torii/libp2p/src/errors.rs index b9845817fc..2920e43a6e 100644 --- a/crates/torii/libp2p/src/errors.rs +++ b/crates/torii/libp2p/src/errors.rs @@ -39,4 +39,7 @@ pub enum Error { #[error("Failed to read certificate: {0}")] ReadCertificateError(anyhow::Error), + + #[error("Invalid message provided: {0}")] + InvalidMessageError(String), } diff --git a/crates/torii/libp2p/src/lib.rs b/crates/torii/libp2p/src/lib.rs index db6b58cbeb..1eaf1a17bd 100644 --- a/crates/torii/libp2p/src/lib.rs +++ b/crates/torii/libp2p/src/lib.rs @@ -4,4 +4,5 @@ pub mod errors; #[cfg(not(target_arch = "wasm32"))] pub mod server; mod tests; +pub mod typed_data; pub mod types; diff --git a/crates/torii/libp2p/src/server/mod.rs b/crates/torii/libp2p/src/server/mod.rs index 2d321d1242..0ef61fc8ad 100644 --- a/crates/torii/libp2p/src/server/mod.rs +++ b/crates/torii/libp2p/src/server/mod.rs @@ -2,10 +2,15 @@ use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; use std::net::Ipv4Addr; use std::path::Path; +use std::str::FromStr; use std::time::Duration; use std::{fs, io}; +use crypto_bigint::U256; +use dojo_types::primitive::Primitive; +use dojo_types::schema::{Member, Struct, Ty}; use futures::StreamExt; +use indexmap::IndexMap; use libp2p::core::multiaddr::Protocol; use libp2p::core::muxing::StreamMuxerBox; use libp2p::core::Multiaddr; @@ -14,7 +19,11 @@ use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use libp2p::{identify, identity, noise, ping, relay, tcp, yamux, PeerId, Swarm, Transport}; use libp2p_webrtc as webrtc; use rand::thread_rng; -use tracing::info; +use serde_json::Number; +use starknet_crypto::{poseidon_hash_many, verify}; +use starknet_ff::FieldElement; +use torii_core::sql::Sql; +use tracing::{info, warn}; use webrtc::tokio::Certificate; use crate::constants; @@ -22,8 +31,11 @@ use crate::errors::Error; mod events; +use sqlx::Row; + use crate::server::events::ServerEvent; -use crate::types::{ClientMessage, ServerMessage}; +use crate::typed_data::PrimitiveType; +use crate::types::Message; #[derive(NetworkBehaviour)] #[behaviour(out_event = "ServerEvent")] @@ -36,10 +48,12 @@ pub struct Behaviour { pub struct Relay { swarm: Swarm, + db: Sql, } impl Relay { pub fn new( + pool: Sql, port: u16, port_webrtc: u16, local_key_path: Option, @@ -129,7 +143,7 @@ impl Relay { .subscribe(&IdentTopic::new(constants::MESSAGING_TOPIC)) .unwrap(); - Ok(Self { swarm }) + Ok(Self { swarm, db: pool }) } pub async fn run(&mut self) { @@ -142,45 +156,186 @@ impl Relay { message_id, message, }) => { - // Deserialize message. + // Deserialize typed data. 
// We shouldn't panic here - let message = serde_json::from_slice::(&message.data); - if let Err(e) = message { - info!( - target: "torii::relay::server::gossipsub", - error = %e, - "Failed to deserialize message" - ); - continue; - } - - let message = message.unwrap(); + let data = match serde_json::from_slice::(&message.data) { + Ok(message) => message, + Err(e) => { + info!( + target: "torii::relay::server::gossipsub", + error = %e, + "Failed to deserialize message" + ); + continue; + } + }; + + let ty = match validate_message(&data.message.message) { + Ok(parsed_message) => parsed_message, + Err(e) => { + info!( + target: "torii::relay::server::gossipsub", + error = %e, + "Failed to validate message" + ); + continue; + } + }; info!( target: "torii::relay::server", message_id = %message_id, peer_id = %peer_id, - topic = %message.topic, - data = %String::from_utf8_lossy(&message.data), + data = ?data, "Received message" ); - // forward message to room - let server_message = - ServerMessage { peer_id: peer_id.to_bytes(), data: message.data }; + // retrieve entity identity from db + let mut pool = match self.db.pool.acquire().await { + Ok(pool) => pool, + Err(e) => { + warn!( + target: "torii::relay::server", + error = %e, + "Failed to acquire pool" + ); + continue; + } + }; + + let keys = match ty_keys(&ty) { + Ok(keys) => keys, + Err(e) => { + warn!( + target: "torii::relay::server", + error = %e, + "Failed to get message model keys" + ); + continue; + } + }; + + // select only identity field, if doesn't exist, empty string + let entity = match sqlx::query("SELECT * FROM ? WHERE id = ?") + .bind(&ty.as_struct().unwrap().name) + .bind(format!("{:#x}", poseidon_hash_many(&keys))) + .fetch_optional(&mut *pool) + .await + { + Ok(entity_identity) => entity_identity, + Err(e) => { + warn!( + target: "torii::relay::server", + error = %e, + "Failed to fetch entity" + ); + continue; + } + }; + + if entity.is_none() { + // we can set the entity without checking identity + if let Err(e) = + self.db.set_entity(ty, &message_id.to_string()).await + { + info!( + target: "torii::relay::server", + error = %e, + "Failed to set message" + ); + continue; + } else { + info!( + target: "torii::relay::server", + message_id = %message_id, + peer_id = %peer_id, + "Message set" + ); + continue; + } + } - if let Err(e) = self.swarm.behaviour_mut().gossipsub.publish( - IdentTopic::new(message.topic), - serde_json::to_string(&server_message) - .expect("Failed to serialize message") - .as_bytes(), + let entity = entity.unwrap(); + let identity = match FieldElement::from_str(&match entity + .try_get::("identity") + { + Ok(identity) => identity, + Err(e) => { + warn!( + target: "torii::relay::server", + error = %e, + "Failed to get identity from model" + ); + continue; + } + }) { + Ok(identity) => identity, + Err(e) => { + warn!( + target: "torii::relay::server", + error = %e, + "Failed to parse identity" + ); + continue; + } + }; + + // TODO: have a nonce in model to check + // against entity nonce and message nonce + // to prevent replay attacks. 
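+                        // The typed-data hash computed below commits to the domain, the message
+                        // contents and the `identity` (used as the signing account address); the
+                        // same `identity` felt is then used as the public key when checking the
+                        // (r, s) signature.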
+ + // Verify the signature + let message_hash = if let Ok(message) = data.message.encode(identity) { + message + } else { + info!( + target: "torii::relay::server", + "Failed to encode message" + ); + continue; + }; + + // for the public key used for verification; use identity from model + if let Ok(valid) = verify( + &identity, + &message_hash, + &data.signature_r, + &data.signature_s, ) { + if !valid { + info!( + target: "torii::relay::server", + "Invalid signature" + ); + continue; + } + } else { info!( - target: "torii::relay::server::gossipsub", + target: "torii::relay::server", + "Failed to verify signature" + ); + continue; + } + + if let Err(e) = self + .db + // event id is message id + .set_entity(ty, &message_id.to_string()) + .await + { + info!( + target: "torii::relay::server", error = %e, - "Failed to publish message" + "Failed to set message" ); } + + info!( + target: "torii::relay::server", + message_id = %message_id, + peer_id = %peer_id, + "Message verified and set" + ); } ServerEvent::Gossipsub(gossipsub::Event::Subscribed { peer_id, topic }) => { info!( @@ -233,6 +388,293 @@ impl Relay { } } +fn ty_keys(ty: &Ty) -> Result, Error> { + if let Ty::Struct(s) = &ty { + let mut keys = Vec::new(); + for m in s.keys() { + keys.extend(m.serialize().map_err(|_| { + Error::InvalidMessageError("Failed to serialize model key".to_string()) + })?); + } + Ok(keys) + } else { + Err(Error::InvalidMessageError("Entity is not a struct".to_string())) + } +} + +pub fn parse_ty_to_object(ty: &Ty) -> Result, Error> { + match ty { + Ty::Struct(struct_ty) => { + let mut object = IndexMap::new(); + for member in &struct_ty.children { + let mut member_object = IndexMap::new(); + member_object.insert("key".to_string(), PrimitiveType::Bool(member.key)); + member_object.insert( + "type".to_string(), + PrimitiveType::String(ty_to_string_type(&member.ty)), + ); + member_object.insert("value".to_string(), parse_ty_to_primitive(&member.ty)?); + object.insert(member.name.clone(), PrimitiveType::Object(member_object)); + } + Ok(object) + } + _ => Err(Error::InvalidMessageError("Expected Struct type".to_string())), + } +} + +pub fn ty_to_string_type(ty: &Ty) -> String { + match ty { + Ty::Primitive(primitive) => match primitive { + Primitive::U8(_) => "u8".to_string(), + Primitive::U16(_) => "u16".to_string(), + Primitive::U32(_) => "u32".to_string(), + Primitive::USize(_) => "usize".to_string(), + Primitive::U64(_) => "u64".to_string(), + Primitive::U128(_) => "u128".to_string(), + Primitive::U256(_) => "u256".to_string(), + Primitive::Felt252(_) => "felt".to_string(), + Primitive::ClassHash(_) => "class_hash".to_string(), + Primitive::ContractAddress(_) => "contract_address".to_string(), + Primitive::Bool(_) => "bool".to_string(), + }, + Ty::Struct(_) => "struct".to_string(), + Ty::Tuple(_) => "array".to_string(), + Ty::Enum(_) => "enum".to_string(), + } +} + +pub fn parse_ty_to_primitive(ty: &Ty) -> Result { + match ty { + Ty::Primitive(primitive) => match primitive { + Primitive::U8(value) => { + Ok(PrimitiveType::Number(Number::from(value.map(|v| v as u64).unwrap_or(0u64)))) + } + Primitive::U16(value) => { + Ok(PrimitiveType::Number(Number::from(value.map(|v| v as u64).unwrap_or(0u64)))) + } + Primitive::U32(value) => { + Ok(PrimitiveType::Number(Number::from(value.map(|v| v as u64).unwrap_or(0u64)))) + } + Primitive::USize(value) => { + Ok(PrimitiveType::Number(Number::from(value.map(|v| v as u64).unwrap_or(0u64)))) + } + Primitive::U64(value) => { + 
Ok(PrimitiveType::Number(Number::from(value.map(|v| v).unwrap_or(0u64)))) + } + Primitive::U128(value) => Ok(PrimitiveType::String( + value.map(|v| v.to_string()).unwrap_or_else(|| "0".to_string()), + )), + Primitive::U256(value) => Ok(PrimitiveType::String( + value.map(|v| format!("{:#x}", v)).unwrap_or_else(|| "0".to_string()), + )), + Primitive::Felt252(value) => Ok(PrimitiveType::String( + value.map(|v| format!("{:#x}", v)).unwrap_or_else(|| "0".to_string()), + )), + Primitive::ClassHash(value) => Ok(PrimitiveType::String( + value.map(|v| format!("{:#x}", v)).unwrap_or_else(|| "0".to_string()), + )), + Primitive::ContractAddress(value) => Ok(PrimitiveType::String( + value.map(|v| format!("{:#x}", v)).unwrap_or_else(|| "0".to_string()), + )), + Primitive::Bool(value) => Ok(PrimitiveType::Bool(value.unwrap_or(false))), + }, + _ => Err(Error::InvalidMessageError("Expected Primitive type".to_string())), + } +} + +pub fn parse_object_to_ty( + name: String, + object: &IndexMap, +) -> Result { + let mut ty_struct = Struct { name, children: vec![] }; + + for (field_name, value) in object { + // value has to be of type object + let object = if let PrimitiveType::Object(object) = value { + object + } else { + return Err(Error::InvalidMessageError("Value is not an object".to_string())); + }; + + let r#type = if let Some(r#type) = object.get("type") { + if let PrimitiveType::String(r#type) = r#type { + r#type + } else { + return Err(Error::InvalidMessageError("Type is not a string".to_string())); + } + } else { + return Err(Error::InvalidMessageError("Type is missing".to_string())); + }; + + let value = if let Some(value) = object.get("value") { + value + } else { + return Err(Error::InvalidMessageError("Value is missing".to_string())); + }; + + let key = if let Some(key) = object.get("key") { + if let PrimitiveType::Bool(key) = key { + *key + } else { + return Err(Error::InvalidMessageError("Key is not a boolean".to_string())); + } + } else { + return Err(Error::InvalidMessageError("Key is missing".to_string())); + }; + + match value { + PrimitiveType::Object(object) => { + let ty = parse_object_to_ty(field_name.clone(), object)?; + ty_struct.children.push(Member { name: field_name.clone(), ty, key }); + } + PrimitiveType::Array(_) => { + // tuples not supported yet + unimplemented!() + } + PrimitiveType::Number(number) => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: match r#type.as_str() { + "u8" => Ty::Primitive(Primitive::U8(Some(number.as_u64().unwrap() as u8))), + "u16" => { + Ty::Primitive(Primitive::U16(Some(number.as_u64().unwrap() as u16))) + } + "u32" => { + Ty::Primitive(Primitive::U32(Some(number.as_u64().unwrap() as u32))) + } + "usize" => { + Ty::Primitive(Primitive::USize(Some(number.as_u64().unwrap() as u32))) + } + "u64" => Ty::Primitive(Primitive::U64(Some(number.as_u64().unwrap()))), + _ => { + return Err(Error::InvalidMessageError( + "Invalid number type".to_string(), + )); + } + }, + key, + }); + } + PrimitiveType::Bool(boolean) => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::Bool(Some(*boolean))), + key, + }); + } + PrimitiveType::String(string) => match r#type.as_str() { + "u8" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::U8(Some(u8::from_str(string).unwrap()))), + key, + }); + } + "u16" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::U16(Some(u16::from_str(string).unwrap()))), + key, + }); + } 
+ "u32" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::U32(Some(u32::from_str(string).unwrap()))), + key, + }); + } + "usize" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::USize(Some(u32::from_str(string).unwrap()))), + key, + }); + } + "u64" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::U64(Some(u64::from_str(string).unwrap()))), + key, + }); + } + "u128" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::U128(Some(u128::from_str(string).unwrap()))), + key, + }); + } + "u256" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::U256(Some(U256::from_be_hex(string)))), + key, + }); + } + "felt" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::Felt252(Some( + FieldElement::from_str(string).unwrap(), + ))), + key, + }); + } + "class_hash" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::ClassHash(Some( + FieldElement::from_str(string).unwrap(), + ))), + key, + }); + } + "contract_address" => { + ty_struct.children.push(Member { + name: field_name.clone(), + ty: Ty::Primitive(Primitive::ContractAddress(Some( + FieldElement::from_str(string).unwrap(), + ))), + key, + }); + } + _ => { + return Err(Error::InvalidMessageError("Invalid string type".to_string())); + } + }, + } + } + + Ok(Ty::Struct(ty_struct)) +} + +// Validates the message model +// and returns the identity and signature +fn validate_message(message: &IndexMap) -> Result { + let model_name = if let Some(model_name) = message.get("model") { + if let PrimitiveType::String(model_name) = model_name { + model_name + } else { + return Err(Error::InvalidMessageError("Model name is not a string".to_string())); + } + } else { + return Err(Error::InvalidMessageError("Model name is missing".to_string())); + }; + + let model = if let Some(object) = message.get(model_name) { + if let PrimitiveType::Object(object) = object { + parse_object_to_ty(model_name.clone(), object)? 
+ } else { + return Err(Error::InvalidMessageError("Model is not a struct".to_string())); + } + } else { + return Err(Error::InvalidMessageError("Model is missing".to_string())); + }; + + Ok(model) +} + fn read_or_create_identity(path: &Path) -> anyhow::Result { if path.exists() { let bytes = fs::read(path)?; diff --git a/crates/torii/libp2p/src/tests.rs b/crates/torii/libp2p/src/tests.rs index 225dc15a33..b19a5be1a4 100644 --- a/crates/torii/libp2p/src/tests.rs +++ b/crates/torii/libp2p/src/tests.rs @@ -2,8 +2,6 @@ mod test { use std::error::Error; - use futures::StreamExt; - use crate::client::RelayClient; #[cfg(target_arch = "wasm32")] @@ -15,18 +13,32 @@ mod test { #[cfg(not(target_arch = "wasm32"))] #[tokio::test] async fn test_client_messaging() -> Result<(), Box> { - use std::time::Duration; - + use dojo_types::schema::{Member, Struct, Ty}; + use indexmap::IndexMap; + use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; + use starknet_ff::FieldElement; use tokio::time::sleep; - use tokio::{self, select}; + use torii_core::sql::Sql; - use crate::server::Relay; + use crate::server::{parse_ty_to_object, Relay}; + use crate::typed_data::{Domain, TypedData}; + use crate::types::Message; let _ = tracing_subscriber::fmt() .with_env_filter("torii::relay::client=debug,torii::relay::server=debug") .try_init(); + + // Database + let options = ::from_str("sqlite::memory:") + .unwrap() + .create_if_missing(true); + let pool = SqlitePoolOptions::new().max_connections(5).connect_with(options).await.unwrap(); + sqlx::migrate!("../migrations").run(&pool).await.unwrap(); + + let db = Sql::new(pool.clone(), FieldElement::from_bytes_be(&[0; 32]).unwrap()).await?; + // Initialize the relay server - let mut relay_server: Relay = Relay::new(9900, 9901, None, None)?; + let mut relay_server: Relay = Relay::new(db, 9900, 9901, None, None)?; tokio::spawn(async move { relay_server.run().await; }); @@ -37,27 +49,72 @@ mod test { client.event_loop.lock().await.run().await; }); - client.command_sender.subscribe("mawmaw".to_string()).await?; client.command_sender.wait_for_relay().await?; - client.command_sender.publish("mawmaw".to_string(), "mimi".as_bytes().to_vec()).await?; + let mut data = Struct { name: "Message".to_string(), children: vec![] }; + + data.children.push(Member { + name: "player".to_string(), + ty: dojo_types::schema::Ty::Primitive( + dojo_types::primitive::Primitive::ContractAddress(Some( + FieldElement::from_bytes_be(&[0; 32]).unwrap(), + )), + ), + key: true, + }); - let message_receiver = client.message_receiver.clone(); - let mut message_receiver = message_receiver.lock().await; - - loop { - select! 
{ - event = message_receiver.next() => { - if let Some(message) = event { - println!("Received message from {:?} with id {:?}: {:?}", message.source, message.message_id, message); - return Ok(()); - } - } - _ = sleep(Duration::from_secs(5)) => { - println!("Test Failed: Did not receive message within 5 seconds."); - return Err("Timeout reached without receiving a message".into()); - } - } - } + data.children.push(Member { + name: "message".to_string(), + ty: dojo_types::schema::Ty::Primitive(dojo_types::primitive::Primitive::U8(Some(0))), + key: false, + }); + + let mut typed_data = TypedData::new( + IndexMap::new(), + "Message", + Domain::new("Message", "1", "0x0", Some("1")), + IndexMap::new(), + ); + + typed_data.message.insert( + "model".to_string(), + crate::typed_data::PrimitiveType::String("Message".to_string()), + ); + typed_data.message.insert( + "Message".to_string(), + crate::typed_data::PrimitiveType::Object( + parse_ty_to_object(&Ty::Struct(data.clone())).unwrap(), + ), + ); + + println!("object ty: {:?}", parse_ty_to_object(&Ty::Struct(data)).unwrap()); + + client + .command_sender + .publish(Message { + message: typed_data, + signature_r: FieldElement::from_bytes_be(&[0; 32]).unwrap(), + signature_s: FieldElement::from_bytes_be(&[0; 32]).unwrap(), + }) + .await?; + + sleep(std::time::Duration::from_secs(2)).await; + + Ok(()) + // loop { + // select! { + // entity = sqlx::query("SELECT * FROM entities WHERE id = ?") + // .bind(format!("{:#x}", FieldElement::from_bytes_be(&[0; + // 32]).unwrap())).fetch_one(&pool) => { if let Ok(_) = entity { + // println!("Test OK: Received message within 5 seconds."); + // return Ok(()); + // } + // } + // _ = sleep(Duration::from_secs(5)) => { + // println!("Test Failed: Did not receive message within 5 seconds."); + // return Err("Timeout reached without receiving a message".into()); + // } + // } + // } } #[cfg(target_arch = "wasm32")] diff --git a/crates/torii/libp2p/src/typed_data.rs b/crates/torii/libp2p/src/typed_data.rs new file mode 100644 index 0000000000..4dec9753e9 --- /dev/null +++ b/crates/torii/libp2p/src/typed_data.rs @@ -0,0 +1,678 @@ +use std::str::FromStr; + +use indexmap::IndexMap; +use serde::{Deserialize, Serialize}; +use serde_json::Number; +use starknet_core::utils::{ + cairo_short_string_to_felt, get_selector_from_name, CairoShortStringToFeltError, +}; +use starknet_crypto::poseidon_hash_many; +use starknet_ff::FieldElement; + +use crate::errors::Error; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct SimpleField { + pub name: String, + pub r#type: String, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ParentField { + pub name: String, + pub r#type: String, + pub contains: String, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum Field { + ParentType(ParentField), + SimpleType(SimpleField), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum PrimitiveType { + // All of object types. Including preset types + Object(IndexMap), + Array(Vec), + Bool(bool), + // comprehensive representation of + // String, ShortString, Selector and Felt + String(String), + // For JSON numbers. 
Formed into a Felt + Number(Number), +} + +fn get_preset_types() -> IndexMap> { + let mut types = IndexMap::new(); + + types.insert( + "TokenAmount".to_string(), + vec![ + Field::SimpleType(SimpleField { + name: "token_address".to_string(), + r#type: "ContractAddress".to_string(), + }), + Field::SimpleType(SimpleField { + name: "amount".to_string(), + r#type: "u256".to_string(), + }), + ], + ); + + types.insert( + "NftId".to_string(), + vec![ + Field::SimpleType(SimpleField { + name: "collection_address".to_string(), + r#type: "ContractAddress".to_string(), + }), + Field::SimpleType(SimpleField { + name: "token_id".to_string(), + r#type: "u256".to_string(), + }), + ], + ); + + types.insert( + "u256".to_string(), + vec![ + Field::SimpleType(SimpleField { name: "low".to_string(), r#type: "u128".to_string() }), + Field::SimpleType(SimpleField { name: "high".to_string(), r#type: "u128".to_string() }), + ], + ); + + types +} + +// Get the fields of a specific type +// Looks up both the types hashmap as well as the preset types +// Returns the fields and the hashmap of types +fn get_fields(name: &str, types: &IndexMap>) -> Result, Error> { + if let Some(fields) = types.get(name) { + return Ok(fields.clone()); + } + + Err(Error::InvalidMessageError(format!("Type {} not found", name))) +} + +fn get_dependencies( + name: &str, + types: &IndexMap>, + dependencies: &mut Vec, +) -> Result<(), Error> { + if dependencies.contains(&name.to_string()) { + return Ok(()); + } + + dependencies.push(name.to_string()); + + for field in get_fields(name, types)? { + let mut field_type = match field { + Field::SimpleType(simple_field) => simple_field.r#type.clone(), + Field::ParentType(parent_field) => parent_field.contains.clone(), + }; + + field_type = field_type.trim_end_matches('*').to_string(); + + if types.contains_key(&field_type) && !dependencies.contains(&field_type) { + get_dependencies(&field_type, types, dependencies)?; + } + } + + Ok(()) +} + +pub fn encode_type(name: &str, types: &IndexMap>) -> Result { + let mut type_hash = String::new(); + + // get dependencies + let mut dependencies: Vec = Vec::new(); + get_dependencies(name, types, &mut dependencies)?; + + // sort dependencies + dependencies.sort_by_key(|dep| dep.to_lowercase()); + + for dep in dependencies { + type_hash += &format!("\"{}\"", dep); + + type_hash += "("; + + let fields = get_fields(&dep, types)?; + for (idx, field) in fields.iter().enumerate() { + match field { + Field::SimpleType(simple_field) => { + // if ( at start and ) at end + if simple_field.r#type.starts_with('(') && simple_field.r#type.ends_with(')') { + let inner_types = + &simple_field.r#type[1..simple_field.r#type.len() - 1] + .split(',') + .map(|t| { + if !t.is_empty() { format!("\"{}\"", t) } else { t.to_string() } + }) + .collect::>() + .join(","); + type_hash += &format!("\"{}\":({})", simple_field.name, inner_types); + } else { + type_hash += + &format!("\"{}\":\"{}\"", simple_field.name, simple_field.r#type); + } + } + Field::ParentType(parent_field) => { + type_hash += + &format!("\"{}\":\"{}\"", parent_field.name, parent_field.contains); + } + } + + if idx < fields.len() - 1 { + type_hash += ","; + } + } + + type_hash += ")"; + } + + Ok(type_hash) +} + +fn byte_array_from_string( + target_string: &str, +) -> Result<(Vec, FieldElement, usize), CairoShortStringToFeltError> { + let short_strings: Vec<&str> = split_long_string(target_string); + let remainder = short_strings.last().unwrap_or(&""); + + let mut short_strings_encoded = short_strings + .iter() + 
.map(|&s| cairo_short_string_to_felt(s)) + .collect::, _>>()?; + + let (pending_word, pending_word_length) = if remainder.is_empty() || remainder.len() == 31 { + (FieldElement::ZERO, 0) + } else { + (short_strings_encoded.pop().unwrap(), remainder.len()) + }; + + Ok((short_strings_encoded, pending_word, pending_word_length)) +} + +fn split_long_string(long_str: &str) -> Vec<&str> { + let mut result = Vec::new(); + + let mut start = 0; + while start < long_str.len() { + let end = (start + 31).min(long_str.len()); + result.push(&long_str[start..end]); + start = end; + } + + result +} + +#[derive(Debug, Default)] +pub struct Ctx { + pub base_type: String, + pub parent_type: String, + pub is_preset: bool, +} + +pub(crate) struct FieldInfo { + _name: String, + r#type: String, + base_type: String, + index: usize, +} + +pub(crate) fn get_value_type( + name: &str, + types: &IndexMap>, +) -> Result { + // iter both "types" and "preset_types" to find the field + for (idx, (key, value)) in types.iter().enumerate() { + if key == name { + return Ok(FieldInfo { + _name: name.to_string(), + r#type: key.clone(), + base_type: "".to_string(), + index: idx, + }); + } + + for (idx, field) in value.iter().enumerate() { + match field { + Field::SimpleType(simple_field) => { + if simple_field.name == name { + return Ok(FieldInfo { + _name: name.to_string(), + r#type: simple_field.r#type.clone(), + base_type: "".to_string(), + index: idx, + }); + } + } + Field::ParentType(parent_field) => { + if parent_field.name == name { + return Ok(FieldInfo { + _name: name.to_string(), + r#type: parent_field.contains.clone(), + base_type: parent_field.r#type.clone(), + index: idx, + }); + } + } + } + } + } + + Err(Error::InvalidMessageError(format!("Field {} not found in types", name))) +} + +fn get_hex(value: &str) -> Result { + if let Ok(felt) = FieldElement::from_str(value) { + Ok(felt) + } else { + // assume its a short string and encode + cairo_short_string_to_felt(value) + .map_err(|_| Error::InvalidMessageError("Invalid short string".to_string())) + } +} + +impl PrimitiveType { + pub fn encode( + &self, + r#type: &str, + types: &IndexMap>, + preset_types: &IndexMap>, + ctx: &mut Ctx, + ) -> Result { + match self { + PrimitiveType::Object(obj) => { + println!("r#type: {}", r#type); + + ctx.is_preset = preset_types.contains_key(r#type); + + let mut hashes = Vec::new(); + + if ctx.base_type == "enum" { + let (variant_name, value) = obj.first().ok_or_else(|| { + Error::InvalidMessageError("Enum value must be populated".to_string()) + })?; + let variant_type = get_value_type(variant_name, types)?; + + let arr: &Vec = match value { + PrimitiveType::Array(arr) => arr, + _ => { + return Err(Error::InvalidMessageError( + "Enum value must be an array".to_string(), + )); + } + }; + + // variant index + hashes.push(FieldElement::from(variant_type.index as u32)); + + // variant parameters + for (idx, param) in arr.iter().enumerate() { + let field_type = &variant_type + .r#type + .trim_start_matches('(') + .trim_end_matches(')') + .split(',') + .nth(idx) + .ok_or_else(|| { + Error::InvalidMessageError("Invalid enum variant type".to_string()) + })?; + + let field_hash = param.encode(field_type, types, preset_types, ctx)?; + hashes.push(field_hash); + } + + return Ok(poseidon_hash_many(hashes.as_slice())); + } + + let type_hash = + encode_type(r#type, if ctx.is_preset { preset_types } else { types })?; + println!("type_hash: {}", type_hash); + hashes.push(get_selector_from_name(&type_hash).map_err(|_| { + 
Error::InvalidMessageError(format!("Invalid type {} for selector", r#type)) + })?); + + for (field_name, value) in obj { + // recheck if we're currently in a preset type + ctx.is_preset = preset_types.contains_key(r#type); + + // pass correct types - preset or types + let field_type = get_value_type( + field_name, + if ctx.is_preset { preset_types } else { types }, + )?; + ctx.base_type = field_type.base_type; + ctx.parent_type = r#type.to_string(); + let field_hash = + value.encode(field_type.r#type.as_str(), types, preset_types, ctx)?; + hashes.push(field_hash); + } + + Ok(poseidon_hash_many(hashes.as_slice())) + } + PrimitiveType::Array(array) => Ok(poseidon_hash_many( + array + .iter() + .map(|x| x.encode(r#type.trim_end_matches('*'), types, preset_types, ctx)) + .collect::, _>>()? + .as_slice(), + )), + PrimitiveType::Bool(boolean) => { + let v = + if *boolean { FieldElement::from(1_u32) } else { FieldElement::from(0_u32) }; + Ok(v) + } + PrimitiveType::String(string) => match r#type { + "shortstring" => get_hex(string), + "string" => { + // split the string into short strings and encode + let byte_array = byte_array_from_string(string).map_err(|_| { + Error::InvalidMessageError("Invalid short string".to_string()) + })?; + + let mut hashes = vec![FieldElement::from(byte_array.0.len())]; + + for hash in byte_array.0 { + hashes.push(hash); + } + + hashes.push(byte_array.1); + hashes.push(FieldElement::from(byte_array.2)); + + Ok(poseidon_hash_many(hashes.as_slice())) + } + "selector" => get_selector_from_name(string).map_err(|_| { + Error::InvalidMessageError(format!("Invalid type {} for selector", r#type)) + }), + "felt" => get_hex(string), + "ContractAddress" => get_hex(string), + "ClassHash" => get_hex(string), + "timestamp" => get_hex(string), + "u128" => get_hex(string), + "i128" => get_hex(string), + _ => Err(Error::InvalidMessageError(format!("Invalid type {} for string", r#type))), + }, + PrimitiveType::Number(number) => { + let felt = FieldElement::from_str(&number.to_string()).map_err(|_| { + Error::InvalidMessageError(format!("Invalid number {}", number)) + })?; + Ok(felt) + } + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Domain { + pub name: String, + pub version: String, + #[serde(rename = "chainId")] + pub chain_id: String, + pub revision: Option, +} + +impl Domain { + pub fn new(name: &str, version: &str, chain_id: &str, revision: Option<&str>) -> Self { + Self { + name: name.to_string(), + version: version.to_string(), + chain_id: chain_id.to_string(), + revision: revision.map(|s| s.to_string()), + } + } + + pub fn encode(&self, types: &IndexMap>) -> Result { + let mut object = IndexMap::new(); + + object.insert("name".to_string(), PrimitiveType::String(self.name.clone())); + object.insert("version".to_string(), PrimitiveType::String(self.version.clone())); + object.insert("chainId".to_string(), PrimitiveType::String(self.chain_id.clone())); + if let Some(revision) = &self.revision { + object.insert("revision".to_string(), PrimitiveType::String(revision.clone())); + } + + // we dont need to pass our preset types here. 
domain should never use a preset type + PrimitiveType::Object(object).encode( + "StarknetDomain", + types, + &IndexMap::new(), + &mut Default::default(), + ) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct TypedData { + pub types: IndexMap>, + #[serde(rename = "primaryType")] + pub primary_type: String, + pub domain: Domain, + pub message: IndexMap, +} + +impl TypedData { + pub fn new( + types: IndexMap>, + primary_type: &str, + domain: Domain, + message: IndexMap, + ) -> Self { + Self { types, primary_type: primary_type.to_string(), domain, message } + } + + pub fn encode(&self, account: FieldElement) -> Result { + let preset_types = get_preset_types(); + + if self.domain.revision.clone().unwrap_or("1".to_string()) != "1" { + return Err(Error::InvalidMessageError( + "Legacy revision 0 is not supported".to_string(), + )); + } + + let prefix_message = cairo_short_string_to_felt("StarkNet Message").unwrap(); + + // encode domain separator + let domain_hash = self.domain.encode(&self.types)?; + + // encode message + let message_hash = PrimitiveType::Object(self.message.clone()).encode( + &self.primary_type, + &self.types, + &preset_types, + &mut Default::default(), + )?; + + // return full hash + Ok(poseidon_hash_many(vec![prefix_message, domain_hash, account, message_hash].as_slice())) + } +} + +#[cfg(test)] +mod tests { + use starknet_core::utils::starknet_keccak; + use starknet_ff::FieldElement; + + use super::*; + + #[test] + fn test_read_json() { + // deserialize from json file + let path = "mocks/mail_StructArray.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + println!("{:?}", typed_data); + + let path = "mocks/example_enum.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + println!("{:?}", typed_data); + + let path = "mocks/example_presetTypes.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + println!("{:?}", typed_data); + } + + #[test] + fn test_type_encode() { + let path = "mocks/example_baseTypes.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + let encoded = encode_type(&typed_data.primary_type, &typed_data.types).unwrap(); + + assert_eq!( + encoded, + "\"Example\"(\"n0\":\"felt\",\"n1\":\"bool\",\"n2\":\"string\",\"n3\":\"selector\",\"\ + n4\":\"u128\",\"n5\":\"ContractAddress\",\"n6\":\"ClassHash\",\"n7\":\"timestamp\",\"\ + n8\":\"shortstring\")" + ); + + let path = "mocks/mail_StructArray.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + let encoded = encode_type(&typed_data.primary_type, &typed_data.types).unwrap(); + + assert_eq!( + encoded, + "\"Mail\"(\"from\":\"Person\",\"to\":\"Person\",\"posts_len\":\"felt\",\"posts\":\"\ + Post*\")\"Person\"(\"name\":\"felt\",\"wallet\":\"felt\")\"Post\"(\"title\":\"felt\",\ + \"content\":\"felt\")" + ); + + let path = "mocks/example_enum.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = 
serde_json::from_reader(reader).unwrap(); + + let encoded = encode_type(&typed_data.primary_type, &typed_data.types).unwrap(); + + assert_eq!( + encoded, + "\"Example\"(\"someEnum\":\"MyEnum\")\"MyEnum\"(\"Variant 1\":(),\"Variant \ + 2\":(\"u128\",\"u128*\"),\"Variant 3\":(\"u128\"))" + ); + + let path = "mocks/example_presetTypes.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + let encoded = encode_type(&typed_data.primary_type, &typed_data.types).unwrap(); + + assert_eq!(encoded, "\"Example\"(\"n0\":\"TokenAmount\",\"n1\":\"NftId\")"); + } + + #[test] + fn test_selector_encode() { + let selector = PrimitiveType::String("transfer".to_string()); + let selector_hash = + PrimitiveType::String(starknet_keccak("transfer".as_bytes()).to_string()); + + let types = IndexMap::new(); + let preset_types = get_preset_types(); + + let encoded_selector = + selector.encode("selector", &types, &preset_types, &mut Default::default()).unwrap(); + let raw_encoded_selector = + selector_hash.encode("felt", &types, &preset_types, &mut Default::default()).unwrap(); + + assert_eq!(encoded_selector, raw_encoded_selector); + assert_eq!(encoded_selector, starknet_keccak("transfer".as_bytes())); + } + + #[test] + fn test_domain_hash() { + let path = "mocks/example_baseTypes.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + let domain_hash = typed_data.domain.encode(&typed_data.types).unwrap(); + + assert_eq!( + domain_hash, + FieldElement::from_hex_be( + "0x555f72e550b308e50c1a4f8611483a174026c982a9893a05c185eeb85399657" + ) + .unwrap() + ); + } + + #[test] + fn test_message_hash() { + let address = + FieldElement::from_hex_be("0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826").unwrap(); + + let path = "mocks/example_baseTypes.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + let message_hash = typed_data.encode(address).unwrap(); + + assert_eq!( + message_hash, + FieldElement::from_hex_be( + "0x790d9fa99cf9ad91c515aaff9465fcb1c87784d9cfb27271ed193675cd06f9c" + ) + .unwrap() + ); + + let path = "mocks/example_enum.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + let message_hash = typed_data.encode(address).unwrap(); + + assert_eq!( + message_hash, + FieldElement::from_hex_be( + "0x3df10475ad5a8f49db4345a04a5b09164d2e24b09f6e1e236bc1ccd87627cc" + ) + .unwrap() + ); + + let path = "mocks/example_presetTypes.json"; + let file = std::fs::File::open(path).unwrap(); + let reader = std::io::BufReader::new(file); + + let typed_data: TypedData = serde_json::from_reader(reader).unwrap(); + + let message_hash = typed_data.encode(address).unwrap(); + + assert_eq!( + message_hash, + FieldElement::from_hex_be( + "0x26e7b8cedfa63cdbed14e7e51b60ee53ac82bdf26724eb1e3f0710cb8987522" + ) + .unwrap() + ); + } +} diff --git a/crates/torii/libp2p/src/types.rs b/crates/torii/libp2p/src/types.rs index 4b75f26028..a059038d1e 100644 --- a/crates/torii/libp2p/src/types.rs +++ b/crates/torii/libp2p/src/types.rs @@ -1,13 +1,11 @@ use serde::{Deserialize, Serialize}; +use starknet_ff::FieldElement; -#[derive(Debug, Serialize, 
diff --git a/crates/torii/migrations/20240314182410_event_model.sql b/crates/torii/migrations/20240314182410_event_model.sql
new file mode 100644
index 0000000000..1cb4249e25
--- /dev/null
+++ b/crates/torii/migrations/20240314182410_event_model.sql
@@ -0,0 +1,21 @@
+CREATE TABLE event_messages (
+    id TEXT NOT NULL PRIMARY KEY,
+    keys TEXT,
+    event_id TEXT NOT NULL,
+    model_names TEXT,
+    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE INDEX idx_event_messages_keys ON event_messages (keys);
+CREATE INDEX idx_event_messages_event_id ON event_messages (event_id);
+
+CREATE TABLE event_model (
+    entity_id TEXT NOT NULL,
+    model_id TEXT NOT NULL,
+    UNIQUE (entity_id, model_id),
+    FOREIGN KEY (entity_id) REFERENCES event_messages (id),
+    FOREIGN KEY (model_id) REFERENCES models (id)
+);
+CREATE INDEX idx_event_model_event_id ON event_model (entity_id);
+CREATE INDEX idx_event_model_model_id ON event_model (model_id);
\ No newline at end of file
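The migration adds an `event_messages` table for entities materialized from emitted event models, plus an `event_model` join table tying them to `models`. As a quick, hypothetical illustration of the schema (not part of the changeset), a `sqlx` query against it might look like:

```rust
use sqlx::{Pool, Row, Sqlite};

// Illustrative only: fetch the ids and keys of the most recent event messages,
// newest first, using the columns created by the migration above.
async fn latest_event_messages(
    pool: &Pool<Sqlite>,
) -> sqlx::Result<Vec<(String, Option<String>)>> {
    let rows = sqlx::query(
        "SELECT id, keys FROM event_messages ORDER BY created_at DESC LIMIT 10",
    )
    .fetch_all(pool)
    .await?;

    Ok(rows
        .into_iter()
        .map(|row| (row.get::<String, _>("id"), row.get::<Option<String>, _>("keys")))
        .collect())
}
```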
diff --git a/examples/spawn-and-move/Scarb.toml b/examples/spawn-and-move/Scarb.toml
index 2b3f74a104..ac1a76a442 100644
--- a/examples/spawn-and-move/Scarb.toml
+++ b/examples/spawn-and-move/Scarb.toml
@@ -25,4 +25,4 @@ rpc_url = "http://localhost:5050/"
 # Default account for katana with seed = 0
 account_address = "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03"
 private_key = "0x1800000000300000180000000000030000000000003006001800006600"
-world_address = "0xc26dfdc00af6798af8add1ee5d99a716ce9a5766d81218c4fd675ab9889dc4"
+world_address = "0x1385f25d20a724edc9c7b3bd9636c59af64cbaf9fcd12f33b3af96b2452f295"
diff --git a/examples/spawn-and-move/abis/base/contracts/actions.json b/examples/spawn-and-move/abis/base/contracts/actions.json
index 4381a57357..8c08d2c9bf 100644
--- a/examples/spawn-and-move/abis/base/contracts/actions.json
+++ b/examples/spawn-and-move/abis/base/contracts/actions.json
@@ -164,7 +164,7 @@
       {
         "name": "player",
         "type": "core::starknet::contract_address::ContractAddress",
-        "kind": "data"
+        "kind": "key"
       },
       {
         "name": "direction",
diff --git a/examples/spawn-and-move/abis/deployments/KATANA/contracts/actions.json b/examples/spawn-and-move/abis/deployments/KATANA/contracts/actions.json
index de4442aef8..8c08d2c9bf 100644
--- a/examples/spawn-and-move/abis/deployments/KATANA/contracts/actions.json
+++ b/examples/spawn-and-move/abis/deployments/KATANA/contracts/actions.json
@@ -53,77 +53,6 @@
       }
     ]
   },
-  {
-    "type": "impl",
-    "name": "ActionsComputedImpl",
-    "interface_name": "dojo_examples::actions::IActionsComputed"
-  },
-  {
-    "type": "struct",
-    "name": "dojo_examples::models::Vec2",
-    "members": [
-      {
-        "name": "x",
-        "type": "core::integer::u32"
-      },
-      {
-        "name": "y",
-        "type": "core::integer::u32"
-      }
-    ]
-  },
-  {
-    "type": "struct",
-    "name": "dojo_examples::models::Position",
-    "members": [
-      {
-        "name": "player",
-        "type": "core::starknet::contract_address::ContractAddress"
-      },
-      {
-        "name": "vec",
-        "type": "dojo_examples::models::Vec2"
-      }
-    ]
-  },
-  {
-    "type": "interface",
-    "name": "dojo_examples::actions::IActionsComputed",
-    "items": [
-      {
-        "type": "function",
-        "name": "tile_terrain",
-        "inputs": [
-          {
-            "name": "vec",
-            "type": "dojo_examples::models::Vec2"
-          }
-        ],
-        "outputs": [
-          {
-            "type": "core::felt252"
-          }
-        ],
-        "state_mutability": "view"
-      },
-      {
-        "type": "function",
-        "name": "quadrant",
-        "inputs": [
-          {
-            "name": "pos",
-            "type": "dojo_examples::models::Position"
-          }
-        ],
-        "outputs": [
-          {
-            "type": "core::integer::u8"
-          }
-        ],
-        "state_mutability": "view"
-      }
-    ]
-  },
   {
     "type": "impl",
     "name": "ActionsImpl",
@@ -235,7 +164,7 @@
       {
         "name": "player",
         "type": "core::starknet::contract_address::ContractAddress",
-        "kind": "data"
+        "kind": "key"
       },
       {
         "name": "direction",
diff --git a/examples/spawn-and-move/manifests/base/contracts/actions.toml b/examples/spawn-and-move/manifests/base/contracts/actions.toml
index dcdbe0e0e6..73e4d0e022 100644
--- a/examples/spawn-and-move/manifests/base/contracts/actions.toml
+++ b/examples/spawn-and-move/manifests/base/contracts/actions.toml
@@ -1,5 +1,5 @@
 kind = "DojoContract"
-class_hash = "0x3daa4c1e90a3f816240ee39c4dfdb7e1f3c090d36f2e85f6d0e590de94714ef"
+class_hash = "0x2e9ae1db4791defc5137ccfbf4e191f15b0fbc5c701d83ce94086c1a58bc907"
 abi = "abis/base/contracts/actions.json"
 reads = []
 writes = []
diff --git a/examples/spawn-and-move/manifests/deployments/KATANA.toml b/examples/spawn-and-move/manifests/deployments/KATANA.toml
index 635293b441..c769b2de9d 100644
--- a/examples/spawn-and-move/manifests/deployments/KATANA.toml
+++ b/examples/spawn-and-move/manifests/deployments/KATANA.toml
@@ -1,7 +1,10 @@
 [world]
 kind = "Contract"
-class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd"
-address = "0x1385f25d20a724edc9c7b3bd9636c59af64cbaf9fcd12f33b3af96b2452f295"
+class_hash = "0x3d4d3b30023220949d5389c16f8ec7d273f86017e02e7e90a1fb6840326457b"
+address = "0x62cb58eb21c24a4aef40058506cf4526f0e6af8e22a2228061692c6559a5cfd"
+transaction_hash = "0x16f0f04528e5a4e14a694b4c8ccc8b642b67d8db5d11ae95539f5a52c840e6c"
+block_number = 3
+seed = "dojo_examples"
 name = "dojo::world::world"
 
 [base]
@@ -11,8 +14,8 @@ name = "dojo::base::base"
 
 [[contracts]]
 kind = "DojoContract"
-address = "0x3539c9b89b08095ba914653fb0f20e55d4b172a415beade611bc260b346d0f7"
-class_hash = "0xd43bce39922ec3857da231e3bb5c365c29f837c6dce322e4d61dfae83a4c18"
+address = "0x61c8186138c7f3de2f9efa25a3d63c94ca18c3e67d4c43cec76a5e22bf04b28"
+class_hash = "0x2e9ae1db4791defc5137ccfbf4e191f15b0fbc5c701d83ce94086c1a58bc907"
 abi = "abis/deployments/KATANA/contracts/actions.json"
 reads = [
     "Moves",
@@ -24,7 +27,7 @@ name = "dojo_examples::actions::actions"
 
 [[models]]
 kind = "DojoModel"
-class_hash = "0x511fbd833938f5c4b743eea1e67605a125d7ff60e8a09e8dc227ad2fb59ca54"
+class_hash = "0x794152290b915762781dc8cdd9e3c8ad9ae34f280f36c607fbe19e2d3c17d61"
 name = "dojo_examples::models::moves"
 
 [[models.members]]
@@ -44,7 +47,7 @@ key = false
 
 [[models]]
 kind = "DojoModel"
-class_hash = "0xb33ae053213ccb2a57967ffc4411901f3efab24781ca867adcd0b90f2fece5"
+class_hash = "0x7aba2182c890a0bcc98666d373e9744e32983f466594d57023f3ab4fc6fb3e9"
 name = "dojo_examples::models::position"
 
 [[models.members]]
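The deployment manifest now records the world's deploy `transaction_hash`, `block_number`, and `seed` alongside its class hash and address. For orientation only, here is a minimal, hypothetical sketch of reading those fields with `serde` and `toml`; the real manifest types live in `dojo-world` and are richer than this:

```rust
use serde::Deserialize;

// Hypothetical, trimmed-down mirror of the [world] section in
// manifests/deployments/KATANA.toml; unknown keys are simply ignored by serde.
#[derive(Debug, Deserialize)]
struct WorldEntry {
    class_hash: String,
    address: String,
    transaction_hash: String,
    block_number: u64,
    seed: String,
}

#[derive(Debug, Deserialize)]
struct DeploymentManifest {
    world: WorldEntry,
}

fn read_world(path: &str) -> anyhow::Result<WorldEntry> {
    // Parse the deployment manifest and return just the world entry.
    let manifest: DeploymentManifest = toml::from_str(&std::fs::read_to_string(path)?)?;
    Ok(manifest.world)
}
```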
diff --git a/examples/spawn-and-move/src/actions.cairo b/examples/spawn-and-move/src/actions.cairo
index 1f64df12e4..9844d1207e 100644
--- a/examples/spawn-and-move/src/actions.cairo
+++ b/examples/spawn-and-move/src/actions.cairo
@@ -27,10 +27,11 @@ mod actions {
         Moved: Moved,
     }
 
-    #[derive(Drop, starknet::Event)]
+    #[derive(starknet::Event, Model, Copy, Drop, Serde)]
     struct Moved {
+        #[key]
         player: ContractAddress,
-        direction: Direction
+        direction: Direction,
     }
 
     // impl: implement functions specified in trait
@@ -62,7 +63,7 @@ mod actions {
             moves.last_direction = direction;
             let next = next_position(position, direction);
             set!(world, (moves, next));
-            emit!(world, Moved { player, direction });
+            emit!(world, (Moved { player, direction }));
             return ();
         }
     }