diff --git a/projects/ssddOnTop/Cargo.lock b/projects/ssddOnTop/Cargo.lock
new file mode 100644
index 0000000..b713eb7
--- /dev/null
+++ b/projects/ssddOnTop/Cargo.lock
@@ -0,0 +1,2875 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "Inflector"
+version = "0.11.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
+dependencies = [
+ "lazy_static",
+ "regex",
+]
+
+[[package]]
+name = "addr2line"
+version = "0.24.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler2"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.88"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e1496f8fb1fbf272686b8d37f523dab3e4a7443300055e74cdaa449f3114356"
+
+[[package]]
+name = "ascii_utils"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71938f30533e4d95a6d17aa530939da3842c2ab6f4f84b9dae68447e4129f74a"
+
+[[package]]
+name = "async-channel"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
+dependencies = [
+ "concurrent-queue",
+ "event-listener 2.5.3",
+ "futures-core",
+]
+
+[[package]]
+name = "async-channel"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
+dependencies = [
+ "concurrent-queue",
+ "event-listener-strategy",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-executor"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec"
+dependencies = [
+ "async-task",
+ "concurrent-queue",
+ "fastrand",
+ "futures-lite",
+ "slab",
+]
+
+[[package]]
+name = "async-global-executor"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c"
+dependencies = [
+ "async-channel 2.3.1",
+ "async-executor",
+ "async-io",
+ "async-lock",
+ "blocking",
+ "futures-lite",
+ "once_cell",
+]
+
+[[package]]
+name = "async-graphql"
+version = "7.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d37c3e9ba322eb00e9e5e997d58f08e8b6de037325b9367ac59bca8e3cd46af"
+dependencies = [
+ "async-graphql-derive",
+ "async-graphql-parser",
+ "async-graphql-value",
+ "async-stream",
+ "async-trait",
+ "base64 0.22.1",
+ "bytes",
+ "fast_chemail",
+ "fnv",
+ "futures-timer",
+ "futures-util",
+ "handlebars",
+ "http 1.1.0",
+ "indexmap",
+ "mime",
+ "multer",
+ "num-traits",
+ "once_cell",
+ "pin-project-lite",
+ "regex",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "static_assertions_next",
+ "tempfile",
+ "thiserror",
+]
+
+[[package]]
+name = "async-graphql-derive" +version = "7.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1141703c11c6ad4fa9b3b0e1e476dea01dbd18a44db00f949b804afaab2f344" +dependencies = [ + "Inflector", + "async-graphql-parser", + "darling", + "proc-macro-crate", + "proc-macro2", + "quote", + "strum", + "syn", + "thiserror", +] + +[[package]] +name = "async-graphql-parser" +version = "7.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f66edcce4c38c18f7eb181fdf561c3d3aa2d644ce7358fc7a928c00a4ffef17" +dependencies = [ + "async-graphql-value", + "pest", + "serde", + "serde_json", +] + +[[package]] +name = "async-graphql-value" +version = "7.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0206011cad065420c27988f17dd7fe201a0e056b20c262209b7bffcd6fa176" +dependencies = [ + "bytes", + "indexmap", + "serde", + "serde_json", +] + +[[package]] +name = "async-io" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444b0228950ee6501b3568d3c93bf1176a1fdbc3b758dcd9475046d30f4dc7e8" +dependencies = [ + "async-lock", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-lock" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" +dependencies = [ + "event-listener 5.3.1", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-process" +version = "2.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8a07789659a4d385b79b18b9127fc27e1a59e1e89117c78c5ea3b806f016374" +dependencies = [ + "async-channel 2.3.1", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener 5.3.1", + "futures-lite", + "rustix", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-signal" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "637e00349800c0bdf8bfc21ebbc0b6524abea702b0da4168ac00d070d0c0b9f3" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-std" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" +dependencies = [ + "async-channel 1.9.0", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + 
+[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + +[[package]] +name = "async-trait" +version = "0.1.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blocking" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" +dependencies = [ + "async-channel 2.3.1", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" +dependencies = [ + "serde", +] + +[[package]] +name = "cacache" +version = "12.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"142316461ed3a3dfcba10417317472da5bfd0461e4d276bf7c07b330766d9490" +dependencies = [ + "async-std", + "digest", + "either", + "futures", + "hex", + "libc", + "memmap2", + "miette", + "reflink-copy", + "serde", + "serde_derive", + "serde_json", + "sha1", + "sha2", + "ssri", + "tempfile", + "thiserror", + "walkdir", +] + +[[package]] +name = "cc" +version = "1.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62ac837cdb5cb22e10a256099b4fc502b1dfe560cb282963a974d7abd80e476" +dependencies = [ + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +dependencies = [ + "libc", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derive-getters" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74ef43543e701c01ad77d3a5922755c6a1d71b22d942cb8042be4994b380caff" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_more" +version = "0.99.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn", +] + +[[package]] +name = "derive_setters" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e8ef033054e131169b8f0f9a7af8f5533a9436fadf3c500ed547f730f07090d" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "dyn-clone" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "encoding_rs" +version = "0.8.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "5.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +dependencies = [ + "event-listener 5.3.1", + "pin-project-lite", +] + +[[package]] +name = "fast_chemail" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "495a39d30d624c2caabe6312bfead73e7717692b44e0b32df168c275a2e8e9e4" +dependencies = [ + "ascii_utils", +] + +[[package]] 
+name = "fastrand" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-lite" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" + +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "handlebars" +version = "5.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d08485b96a0e6393e9e4d1b8d48cf74ad6c063cd905eb33f42c1ce3f0377539b" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = 
[ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "pin-project-lite", +] + +[[package]] +name = "http-cache" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b5ab65432bbdfe8490dfde21d0366353a8d39f2bc24aca0146889f931b0b4b5" +dependencies = [ + "async-trait", + "bincode", + "cacache", + "http 0.2.12", + "http-cache-semantics", + "httpdate", + "moka", + "serde", + "url", +] + +[[package]] +name = "http-cache-reqwest" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8285341ce7e709c56a0f259ff1c789c70edfbaa88acd69d27e4d63980b92dc" +dependencies = [ + "anyhow", + "async-trait", + "http 0.2.12", + "http-cache", + "http-cache-semantics", + "reqwest", + "reqwest-middleware", + "serde", + "task-local-extensions", + "url", +] + +[[package]] +name = "http-cache-semantics" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aec9f678bca3f4a15194b980f20ed9bfe0dd38e8d298c65c559a93dfbd6380a" +dependencies = [ + "http 0.2.12", + "http-serde", + "reqwest", + "serde", + "time", +] + +[[package]] +name = "http-serde" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f560b665ad9f1572cfcaf034f7fb84338a7ce945216d64a90fd81f046a3caee" +dependencies = [ + "http 0.2.12", + "serde", +] + +[[package]] +name = "httparse" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + 
"http-body 1.0.1", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper 0.14.30", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "hyper-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da62f120a8a37763efb0cf8fdf264b884c7b8b9ac8660b900c8661030c00e6ba" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "hyper 1.4.1", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + +[[package]] +name = "indexmap" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" +dependencies = [ + "equivalent", + "hashbrown", + "serde", +] + +[[package]] +name = "ipnet" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.70" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.158" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + 
"autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +dependencies = [ + "value-bag", +] + +[[package]] +name = "macros" +version = "0.1.0" +dependencies = [ + "anyhow", + "macros_common", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "macros_common" +version = "0.1.0" +dependencies = [ + "async-graphql", + "schemars", +] + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "memmap2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" +dependencies = [ + "libc", +] + +[[package]] +name = "miette" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e" +dependencies = [ + "miette-derive", + "once_cell", + "thiserror", + "unicode-width", +] + +[[package]] +name = "miette-derive" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "wasi", + "windows-sys 0.52.0", +] + +[[package]] +name = "moka" +version = "0.12.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32cf62eb4dd975d2dde76432fb1075c49e3ee2331cf36f1f8fd4b66550d32b6f" +dependencies = [ + "async-lock", + "async-trait", + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "event-listener 5.3.1", + "futures-util", + "once_cell", + "parking_lot", + "quanta", + "rustc_version", + "smallvec", + "tagptr", + "thiserror", + "triomphe", + "uuid", +] + +[[package]] +name = "multer" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b" +dependencies = [ + "bytes", + "encoding_rs", + "futures-util", + "http 1.1.0", + "httparse", + "memchr", + "mime", + "spin", + "version_check", +] + +[[package]] +name = "native-tls" +version = "0.2.12" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.9", + "libc", +] + +[[package]] +name = "object" +version = "0.36.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssl" +version = "0.10.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +dependencies = [ + "bitflags 2.6.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "percent-encoding" +version = 
"2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pest" +version = "2.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c73c26c01b8c87956cea613c907c9d6ecffd8d18a2a5908e5de0adfaa185cea" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "664d22978e2815783adbdd2c588b455b1bd625299ce36b2a99881ac9627e6d8d" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2d5487022d5d33f4c30d91c22afa240ce2a644e87fe08caad974d4eab6badbe" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pest_meta" +version = "2.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0091754bbd0ea592c4deb3a122ce8ecbb0753b738aa82bc055fcc2eccc8d8174" +dependencies = [ + "once_cell", + "pest", + "sha2", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "polling" +version = "3.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2790cd301dec6cd3b7a025e4815cf825724a51c98dccfe6a3e55f05ffb6511" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi 0.4.0", + "pin-project-lite", + "rustix", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "proc-macro-crate" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" +dependencies = [ + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quanta" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5167a477619228a0b284fac2674e3c388cba90631d7b7de620e6f1fcd08da5" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid", + "wasi", + "web-sys", + "winapi", +] + +[[package]] +name = "quote" +version = "1.0.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "raw-cpuid" +version = "11.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb9ee317cfe3fbd54b36a511efc1edd42e216903c9cd575e686dd68a2ba90d8d" +dependencies = [ + "bitflags 2.6.0", +] + +[[package]] +name = "redox_syscall" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" +dependencies = [ + "bitflags 2.6.0", +] + +[[package]] +name = "reflink-copy" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc31414597d1cd7fdd2422798b7652a6329dda0fe0219e6335a13d5bcaa9aeb6" +dependencies = [ + "cfg-if", + "rustix", + "windows", +] + +[[package]] +name = "regex" +version = "1.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.30", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "mime_guess", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + +[[package]] +name = "reqwest-middleware" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a735987236a8e238bf0296c7e351b999c188ccc11477f311b82b55c93984216" +dependencies = [ + "anyhow", + "async-trait", + "http 0.2.12", + "reqwest", + "serde", + "task-local-extensions", + "thiserror", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.38.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +dependencies = [ + "bitflags 2.6.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustversion" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "schemars" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" +dependencies = [ + "dyn-clone", + "schemars_derive", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.6.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" + +[[package]] +name = "serde" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.128" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_json_borrow" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "176a77dea19cf9b2cfe7f9e31966112ef8282a709af7c0a0fb28fc6347c7ba78" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" +dependencies = [ + "itoa", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha-1" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "ssddOnTop" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-graphql", + "async-graphql-value", + "async-trait", + "bytes", + "derive-getters", + "derive_more", + "derive_setters", + "futures-util", + "fxhash", + "http 1.1.0", + "http-body-util", + "http-cache", + "http-cache-reqwest", + "http-cache-semantics", + "hyper 1.4.1", + "hyper-util", + "indenter", + "indexmap", + "macros", + "macros_common", + "moka", + "nom", + "num_cpus", + "reqwest", + "reqwest-middleware", + "schemars", + "serde", + "serde_json", + 
"serde_json_borrow", + "serde_path_to_error", + "strum_macros", + "tokio", + "tracing", + "ttl_cache", + "url", +] + +[[package]] +name = "ssri" +version = "9.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da7a2b3c2bc9693bcb40870c4e9b5bf0d79f9cb46273321bf855ec513e919082" +dependencies = [ + "base64 0.21.7", + "digest", + "hex", + "miette", + "serde", + "sha-1", + "sha2", + "thiserror", + "xxhash-rust", +] + +[[package]] +name = "static_assertions_next" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7beae5182595e9a8b683fa98c4317f956c9a2dec3b9716990d20023cc60c766" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn", +] + +[[package]] +name = "syn" +version = "2.0.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + +[[package]] +name = "task-local-extensions" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba323866e5d033818e3240feeb9f7db2c4296674e4d9e16b97b7bf8f490434e8" +dependencies = [ + "pin-utils", +] + +[[package]] +name = "tempfile" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" +dependencies = [ + "cfg-if", + "fastrand", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "thiserror" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.40.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" + +[[package]] +name = "toml_edit" +version = "0.22.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" +dependencies = [ + "indexmap", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", +] + +[[package]] +name = "triomphe" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859eb650cfee7434994602c3a68b25d77ad9e68c8a6cd491616ef86661382eb3" + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "ttl_cache" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4189890526f0168710b6ee65ceaedf1460c48a14318ceec933cb26baa492096a" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" + +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-width" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" + +[[package]] +name = "url" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "uuid" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +dependencies = [ + "getrandom", +] + +[[package]] +name = "value-bag" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" + +[[package]] +name = "web-sys" +version = "0.3.70" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = 
"winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" +dependencies = [ + "windows-core", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-implement" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + 
+[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "xxhash-rust" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a5cbf750400958819fb6178eaa83bee5cd9c29a26a40cc241df8c70fdd46984" diff --git a/projects/ssddOnTop/Cargo.toml b/projects/ssddOnTop/Cargo.toml new file mode 100644 index 0000000..9230540 --- /dev/null +++ b/projects/ssddOnTop/Cargo.toml @@ -0,0 +1,51 @@ +[package] +name = "ssddOnTop" +version = "0.1.0" +edition = "2021" + +[dependencies] +macros = {path = "./macros"} +macros_common = {path = "./macros-common"} +tokio = {version = "1.40.0", features = ["full"]} +tracing = "0.1.40" +hyper = {version = "1.4.1",features = ["http1","server"]} +hyper-util = {version = "0.1.8", features = ["tokio"]} +http-body-util = "0.1.2" +bytes = "1.7.1" +anyhow = "1.0.87" +async-graphql = {version = "7.0.9", features = ["dynamic-schema"]} +serde_json = "1.0.127" +serde = { version = "1.0.210", features = ["derive"] } +async-graphql-value = "7.0.9" +serde_path_to_error = "0.1.16" +num_cpus = "1.16.0" +reqwest = "0.11" +nom = "7.1.3" +serde_json_borrow = "0.6.0" +derive-getters = "0.5.0" +indexmap = "2.5.0" +derive_more = "0.99.18" +indenter = "0.3.3" +derive_setters = "0.1.6" +fxhash = "0.2.1" +strum_macros = "0.26.4" +http-cache-reqwest = { version = "0.13.0", features = [ + "manager-moka", +], default-features = false } +reqwest-middleware = "0.2.5" +http-cache-semantics = { version = "1.0.1", default-features = false, features = ["with_serde", "reqwest"]} +moka = { version = "0.12.7", default-features = false, features = [ + "future", +]} +async-trait = "0.1.82" +ttl_cache = "0.5.1" +futures-util = "0.3.30" +http = "1.1.0" +url = "2.5.2" +schemars = {version = "0.8.17", features = ["derive"]} + +[dev-dependencies] +http-cache = "0.18.0" + +[workspace] +members = [] diff --git a/projects/ssddOnTop/macros-common/Cargo.toml b/projects/ssddOnTop/macros-common/Cargo.toml new file mode 100644 index 0000000..dd822b4 --- /dev/null +++ b/projects/ssddOnTop/macros-common/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "macros_common" +version = "0.1.0" +edition = "2021" + +[dependencies] +async-graphql = "7.0.9" +schemars = { version = "0.8.17" } diff --git a/projects/ssddOnTop/macros-common/src/common.rs b/projects/ssddOnTop/macros-common/src/common.rs new file mode 100644 index 0000000..b5f5508 --- /dev/null +++ b/projects/ssddOnTop/macros-common/src/common.rs @@ -0,0 +1,42 @@ +use async_graphql::{Pos, Positioned}; +use schemars::schema::SchemaObject; + +pub fn get_description(schema: &SchemaObject) -> Option<&String> { + schema + .metadata + .as_ref() + .and_then(|metadata| metadata.description.as_ref()) +} + +pub fn first_char_to_upper(name: &mut String) { + if let Some(first_char) = name.chars().next() { + // Remove the first character and make it uppercase + let first_char_upper = first_char.to_uppercase().to_string(); + + // Remove the first character from the original string + let mut chars = name.chars(); + chars.next(); + + // Replace the original string with the new one + *name = first_char_upper + chars.as_str(); + } +} + +pub fn first_char_to_lower(name: &str) -> String { + if let Some(first_char) = name.chars().next() { + // Remove the first character and make it uppercase + let first_char_upper = first_char.to_lowercase().to_string(); + + // Remove the first character from the original string + let mut chars = name.chars(); + chars.next(); + + return 
format!("{}{}", first_char_upper, chars.collect::()); + } + + String::new() +} + +pub fn pos(a: A) -> Positioned { + Positioned::new(a, Pos::default()) +} diff --git a/projects/ssddOnTop/macros-common/src/directive_definition.rs b/projects/ssddOnTop/macros-common/src/directive_definition.rs new file mode 100644 index 0000000..908e908 --- /dev/null +++ b/projects/ssddOnTop/macros-common/src/directive_definition.rs @@ -0,0 +1,93 @@ +use std::collections::{BTreeMap, HashSet}; + +use async_graphql::parser::types::{DirectiveLocation, TypeSystemDefinition}; +use async_graphql::Name; +use schemars::schema::{RootSchema, Schema, SchemaObject}; + +use crate::common::{first_char_to_lower, first_char_to_upper, get_description, pos}; +use crate::enum_definition::{into_enum_definition, into_enum_value}; +use crate::input_definition::{into_input_definition, into_input_value_definition}; + +pub trait DirectiveDefinition { + fn directive_definition(generated_types: &mut HashSet) -> Vec; +} + +#[derive(Clone)] +pub struct Attrs { + pub name: &'static str, + pub repeatable: bool, + pub locations: Vec<&'static str>, + pub is_lowercase_name: bool, +} + +pub fn from_directive_location(str: DirectiveLocation) -> String { + match str { + DirectiveLocation::Schema => String::from("SCHEMA"), + DirectiveLocation::Object => String::from("OBJECT"), + DirectiveLocation::FieldDefinition => String::from("FIELD_DEFINITION"), + DirectiveLocation::EnumValue => String::from("ENUM_VALUE"), + _ => String::from("FIELD_DEFINITION"), + } +} + +fn into_directive_location(str: &str) -> DirectiveLocation { + match str { + "Schema" => DirectiveLocation::Schema, + "Object" => DirectiveLocation::Object, + "FieldDefinition" => DirectiveLocation::FieldDefinition, + "EnumValue" => DirectiveLocation::EnumValue, + _ => DirectiveLocation::FieldDefinition, + } +} + +pub fn into_directive_definition( + root_schema: RootSchema, + attrs: Attrs, + generated_types: &mut HashSet, +) -> Vec { + let mut service_doc_definitions = vec![]; + let definitions: BTreeMap = root_schema.definitions; + let schema: SchemaObject = root_schema.schema; + let description = get_description(&schema); + + for (mut name, schema) in definitions.into_iter() { + if generated_types.contains(&name) { + continue; + } + // the definition could either be an enum or a type + // we don't know which one is it, so we first try to get an EnumValue + // if into_enum_value return Some we can be sure it's an Enum + if let Some(enum_values) = into_enum_value(&schema) { + service_doc_definitions.push(into_enum_definition(enum_values, &name)); + generated_types.insert(name.to_string()); + } else { + generated_types.insert(name.to_string()); + first_char_to_upper(&mut name); + service_doc_definitions.push(into_input_definition( + schema.clone().into_object(), + name.as_str(), + )); + } + } + + let name = if attrs.is_lowercase_name { + attrs.name.to_lowercase() + } else { + first_char_to_lower(attrs.name) + }; + + let directve_definition = + TypeSystemDefinition::Directive(pos(async_graphql::parser::types::DirectiveDefinition { + description: description.map(|inner| pos(inner.clone())), + name: pos(Name::new(name)), + arguments: into_input_value_definition(&schema), + is_repeatable: attrs.repeatable, + locations: attrs + .locations + .into_iter() + .map(|val| pos(into_directive_location(val))) + .collect(), + })); + service_doc_definitions.push(directve_definition); + service_doc_definitions +} diff --git a/projects/ssddOnTop/macros-common/src/enum_definition.rs 
b/projects/ssddOnTop/macros-common/src/enum_definition.rs
new file mode 100644
index 0000000..de06522
--- /dev/null
+++ b/projects/ssddOnTop/macros-common/src/enum_definition.rs
@@ -0,0 +1,58 @@
+use async_graphql::parser::types::{
+    EnumType, EnumValueDefinition, TypeDefinition, TypeKind, TypeSystemDefinition,
+};
+use async_graphql::{Name, Positioned};
+use schemars::schema::Schema;
+
+#[derive(Debug)]
+pub struct EnumValue {
+    pub variants: Vec<String>,
+    pub description: Option<Positioned<String>>,
+}
+
+use crate::common::{get_description, pos};
+
+pub fn into_enum_definition(enum_value: EnumValue, name: &str) -> TypeSystemDefinition {
+    let mut enum_value_definition = vec![];
+    for enum_value in enum_value.variants {
+        let formatted_value: String = enum_value
+            .to_string()
+            .chars()
+            .filter(|ch| ch != &'"')
+            .collect();
+        enum_value_definition.push(pos(EnumValueDefinition {
+            value: pos(Name::new(formatted_value)),
+            description: None,
+            directives: vec![],
+        }));
+    }
+
+    TypeSystemDefinition::Type(pos(TypeDefinition {
+        name: pos(Name::new(name)),
+        kind: TypeKind::Enum(EnumType {
+            values: enum_value_definition,
+        }),
+        description: enum_value.description,
+        directives: vec![],
+        extend: false,
+    }))
+}
+
+pub fn into_enum_value(obj: &Schema) -> Option<EnumValue> {
+    match obj {
+        Schema::Object(schema_object) => {
+            let description = get_description(schema_object);
+            if let Some(enum_values) = &schema_object.enum_values {
+                return Some(EnumValue {
+                    variants: enum_values
+                        .iter()
+                        .map(|val| val.to_string())
+                        .collect::<Vec<String>>(),
+                    description: description.map(|description| pos(description.to_owned())),
+                });
+            }
+            None
+        }
+        _ => None,
+    }
+}
diff --git a/projects/ssddOnTop/macros-common/src/input_definition.rs b/projects/ssddOnTop/macros-common/src/input_definition.rs
new file mode 100644
index 0000000..eea4bc4
--- /dev/null
+++ b/projects/ssddOnTop/macros-common/src/input_definition.rs
@@ -0,0 +1,192 @@
+use async_graphql::parser::types::{
+    BaseType, InputObjectType, InputValueDefinition, Type, TypeDefinition, TypeKind,
+    TypeSystemDefinition,
+};
+use async_graphql::{Name, Positioned};
+use schemars::schema::{
+    ArrayValidation, InstanceType, ObjectValidation, Schema, SchemaObject, SingleOrVec,
+};
+
+use crate::common::{first_char_to_upper, get_description, pos};
+
+pub trait InputDefinition {
+    fn input_definition() -> TypeSystemDefinition;
+}
+
+pub fn into_input_definition(schema: SchemaObject, name: &str) -> TypeSystemDefinition {
+    let description = get_description(&schema);
+
+    TypeSystemDefinition::Type(pos(TypeDefinition {
+        name: pos(Name::new(name)),
+        kind: TypeKind::InputObject(InputObjectType {
+            fields: into_input_value_definition(&schema),
+        }),
+        description: description.map(|inner| pos(inner.clone())),
+        directives: vec![],
+        extend: false,
+    }))
+}
+
+pub fn into_input_value_definition(schema: &SchemaObject) -> Vec<Positioned<InputValueDefinition>> {
+    let mut arguments_type = vec![];
+    if let Some(subschema) = schema.subschemas.clone() {
+        let list = subschema.any_of.or(subschema.all_of).or(subschema.one_of);
+        if let Some(list) = list {
+            for schema in list {
+                let schema_object = schema.into_object();
+                arguments_type.extend(build_arguments_type(&schema_object));
+            }
+
+            return arguments_type;
+        }
+    }
+
+    build_arguments_type(schema)
+}
+
+fn build_arguments_type(schema: &SchemaObject) -> Vec<Positioned<InputValueDefinition>> {
+    let mut arguments = vec![];
+    if let Some(properties) = schema
+        .object
+        .as_ref()
+        .map(|object| object.properties.clone())
+    {
+        for (name, property) in properties.into_iter() {
+            let property = property.into_object();
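+            // Each JSON-schema property below becomes one GraphQL input value on the
+            // generated definition. Illustrative example (names assumed, not taken from
+            // this repository): a schemars property `timeout` of type integer maps to an
+            // argument `timeout: Int`, while a nested struct maps to a named input type
+            // or falls back to `JSON` (see `determine_input_value_type_from_schema`).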
let description = get_description(&property); + let definition = pos(InputValueDefinition { + description: description.map(|inner| pos(inner.to_owned())), + name: pos(Name::new(&name)), + ty: pos(determine_input_value_type_from_schema( + name, + property.clone(), + )), + default_value: None, + directives: Vec::new(), + }); + + arguments.push(definition); + } + } + + arguments +} + +fn determine_input_value_type_from_schema(mut name: String, schema: SchemaObject) -> Type { + first_char_to_upper(&mut name); + if let Some(instance_type) = &schema.instance_type { + match instance_type { + SingleOrVec::Single(typ) => match **typ { + InstanceType::Null + | InstanceType::Boolean + | InstanceType::Number + | InstanceType::String + | InstanceType::Integer => Type { + nullable: false, + base: BaseType::Named(Name::new(get_instance_type_name(typ))), + }, + _ => determine_type_from_schema(name, &schema), + }, + SingleOrVec::Vec(typ) => match typ.first().unwrap() { + InstanceType::Null + | InstanceType::Boolean + | InstanceType::Number + | InstanceType::String + | InstanceType::Integer => Type { + nullable: true, + base: BaseType::Named(Name::new(get_instance_type_name(typ.first().unwrap()))), + }, + _ => determine_type_from_schema(name, &schema), + }, + } + } else { + determine_type_from_schema(name, &schema) + } +} + +fn determine_type_from_schema(name: String, schema: &SchemaObject) -> Type { + if let Some(arr_valid) = &schema.array { + return determine_type_from_arr_valid(name, arr_valid); + } + + if let Some(typ) = &schema.object { + return determine_type_from_object_valid(name, typ); + } + + if let Some(subschema) = schema.subschemas.clone().into_iter().next() { + let list = subschema.any_of.or(subschema.all_of).or(subschema.one_of); + + if let Some(list) = list { + if let Some(Schema::Object(obj)) = list.first() { + if let Some(reference) = &obj.reference { + return determine_type_from_reference(reference); + } + } + } + } + + if let Some(reference) = &schema.reference { + return determine_type_from_reference(reference); + } + + Type { + nullable: true, + base: BaseType::Named(Name::new("JSON")), + } +} + +fn determine_type_from_reference(reference: &str) -> Type { + let mut name = reference.split('/').last().unwrap().to_string(); + first_char_to_upper(&mut name); + Type { + nullable: true, + base: BaseType::Named(Name::new(name)), + } +} + +fn determine_type_from_arr_valid(name: String, array_valid: &ArrayValidation) -> Type { + if let Some(items) = &array_valid.items { + match items { + SingleOrVec::Single(schema) => Type { + nullable: true, + base: BaseType::List(Box::new(determine_input_value_type_from_schema( + name, + schema.clone().into_object(), + ))), + }, + SingleOrVec::Vec(schemas) => Type { + nullable: true, + base: BaseType::List(Box::new(determine_input_value_type_from_schema( + name, + schemas[0].clone().into_object(), + ))), + }, + } + } else { + Type { + nullable: true, + base: BaseType::Named(Name::new("JSON")), + } + } +} + +fn determine_type_from_object_valid(name: String, typ: &ObjectValidation) -> Type { + if !typ.properties.is_empty() { + Type { + nullable: true, + base: BaseType::Named(Name::new(name)), + } + } else { + Type { + nullable: true, + base: BaseType::Named(Name::new("JSON")), + } + } +} + +fn get_instance_type_name(typ: &InstanceType) -> String { + match typ { + &InstanceType::Integer => "Int".to_string(), + _ => format!("{:?}", typ), + } +} diff --git a/projects/ssddOnTop/macros-common/src/lib.rs b/projects/ssddOnTop/macros-common/src/lib.rs new file mode 
100644
index 0000000..8a908eb
--- /dev/null
+++ b/projects/ssddOnTop/macros-common/src/lib.rs
@@ -0,0 +1,57 @@
+use async_graphql::parser::types::{ServiceDocument, TypeSystemDefinition};
+use schemars::schema::RootSchema;
+use schemars::JsonSchema;
+mod common;
+pub mod directive_definition;
+mod enum_definition;
+pub mod input_definition;
+pub mod scalar_definition;
+
+pub fn into_schemars<T>() -> RootSchema
+where
+    T: JsonSchema,
+{
+    schemars::schema_for!(T)
+}
+
+pub struct ServiceDocumentBuilder {
+    definitions: Vec<TypeSystemDefinition>,
+}
+
+impl Default for ServiceDocumentBuilder {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl ServiceDocumentBuilder {
+    pub fn new() -> Self {
+        Self {
+            definitions: vec![],
+        }
+    }
+
+    pub fn add_directive(
+        mut self,
+        definitions: Vec<TypeSystemDefinition>,
+    ) -> ServiceDocumentBuilder {
+        self.definitions.extend(definitions);
+        self
+    }
+
+    pub fn add_scalar(mut self, definitions: TypeSystemDefinition) -> ServiceDocumentBuilder {
+        self.definitions.push(definitions);
+        self
+    }
+
+    pub fn add_input(mut self, definitions: TypeSystemDefinition) -> ServiceDocumentBuilder {
+        self.definitions.push(definitions);
+        self
+    }
+
+    pub fn build(self) -> ServiceDocument {
+        ServiceDocument {
+            definitions: self.definitions,
+        }
+    }
+}
diff --git a/projects/ssddOnTop/macros-common/src/scalar_definition.rs b/projects/ssddOnTop/macros-common/src/scalar_definition.rs
new file mode 100644
index 0000000..bdb3808
--- /dev/null
+++ b/projects/ssddOnTop/macros-common/src/scalar_definition.rs
@@ -0,0 +1,21 @@
+use async_graphql::parser::types::{TypeDefinition, TypeKind, TypeSystemDefinition};
+use async_graphql::Name;
+use schemars::schema::{Schema, SchemaObject};
+
+use crate::common::{get_description, pos};
+
+pub trait ScalarDefinition {
+    fn scalar_definition() -> TypeSystemDefinition;
+}
+
+pub fn into_scalar_definition(root_schema: Schema, name: &str) -> TypeSystemDefinition {
+    let schema: SchemaObject = root_schema.into_object();
+    let description = get_description(&schema);
+    TypeSystemDefinition::Type(pos(TypeDefinition {
+        name: pos(Name::new(name)),
+        kind: TypeKind::Scalar,
+        description: description.map(|inner| pos(inner.clone())),
+        directives: vec![],
+        extend: false,
+    }))
+}
diff --git a/projects/ssddOnTop/macros/Cargo.toml b/projects/ssddOnTop/macros/Cargo.toml
new file mode 100644
index 0000000..5885e09
--- /dev/null
+++ b/projects/ssddOnTop/macros/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "macros"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+proc-macro = true
+
+[dependencies]
+syn = { version = "2.0.60", features = ["derive", "full"] }
+quote = "1.0.36"
+proc-macro2 = "1.0.81"
+macros_common = {path = "../macros-common"}
+anyhow = "1.0.88"
\ No newline at end of file
diff --git a/projects/ssddOnTop/macros/src/document_definition.rs b/projects/ssddOnTop/macros/src/document_definition.rs
new file mode 100644
index 0000000..76c4bef
--- /dev/null
+++ b/projects/ssddOnTop/macros/src/document_definition.rs
@@ -0,0 +1,93 @@
+extern crate proc_macro;
+use proc_macro::TokenStream;
+use quote::quote;
+use syn::{parse_macro_input, DeriveInput};
+
+#[derive(Default)]
+struct DirectiveDefinitionAttr {
+    is_repeatable: bool,
+    is_lowercase_name: bool,
+    locations: Option<String>,
+}
+
+fn get_directive_definition_attr(input: &DeriveInput) -> syn::Result<DirectiveDefinitionAttr> {
+    let mut directive_definition_attr: DirectiveDefinitionAttr = Default::default();
+    for attr in input.attrs.iter() {
+        if attr.path().is_ident("directive_definition") {
+            attr.parse_nested_meta(|meta| {
+                if meta.path.is_ident("repeatable")
{
+                    directive_definition_attr.is_repeatable = true;
+                }
+
+                if meta.path.is_ident("locations") {
+                    let value = meta.value()?;
+                    let s: syn::LitStr = value.parse()?;
+                    directive_definition_attr.locations = Some(s.value());
+                }
+
+                if meta.path.is_ident("lowercase_name") {
+                    directive_definition_attr.is_lowercase_name = true;
+                }
+
+                Ok(())
+            })?;
+        }
+    }
+
+    Ok(directive_definition_attr)
+}
+
+pub fn expand_directive_definition(input: TokenStream) -> TokenStream {
+    let input = parse_macro_input!(input as DeriveInput);
+    let struct_identifier = &input.ident;
+
+    let directive_definition_attr = get_directive_definition_attr(&input);
+    if let Err(err) = directive_definition_attr {
+        panic!("{}", err);
+    }
+
+    let directive_definition_attr = directive_definition_attr.unwrap();
+    let is_repeatable = directive_definition_attr.is_repeatable;
+    let is_lowercase_name = directive_definition_attr.is_lowercase_name;
+    let locations = if let Some(locations) = directive_definition_attr.locations {
+        locations
+            .split(",")
+            .map(|location| location.trim().to_string())
+            .collect::<Vec<String>>()
+    } else {
+        vec![]
+    };
+
+    let expanded = quote! {
+        impl macros_common::directive_definition::DirectiveDefinition for #struct_identifier {
+            fn directive_definition(generated_types: &mut std::collections::HashSet<String>) -> Vec<async_graphql::parser::types::TypeSystemDefinition> {
+                let schemars = macros_common::into_schemars::<Self>();
+                let attr = macros_common::directive_definition::Attrs {
+                    name: stringify!(#struct_identifier),
+                    repeatable: #is_repeatable,
+                    locations: vec![#(#locations),*],
+                    is_lowercase_name: #is_lowercase_name
+                };
+                macros_common::directive_definition::into_directive_definition(schemars, attr, generated_types)
+            }
+        }
+    };
+
+    TokenStream::from(expanded)
+}
+
+pub fn expand_input_definition(input: TokenStream) -> TokenStream {
+    let input = parse_macro_input!(input as DeriveInput);
+    let struct_identifier = &input.ident;
+
+    let expanded = quote!
{
+        impl macros_common::input_definition::InputDefinition for #struct_identifier {
+            fn input_definition() -> async_graphql::parser::types::TypeSystemDefinition {
+                let schemars = macros_common::into_schemars::<Self>();
+                macros_common::input_definition::into_input_definition(schemars.schema, stringify!(#struct_identifier))
+            }
+        }
+    };
+
+    TokenStream::from(expanded)
+}
diff --git a/projects/ssddOnTop/macros/src/gen.rs b/projects/ssddOnTop/macros/src/gen.rs
new file mode 100644
index 0000000..f063551
--- /dev/null
+++ b/projects/ssddOnTop/macros/src/gen.rs
@@ -0,0 +1,100 @@
+use proc_macro::TokenStream;
+use quote::quote;
+use syn::{parse_macro_input, Attribute, Data, DeriveInput, Expr, Meta};
+
+fn extract_gen_doc_ty(attrs: &[Attribute]) -> String {
+    attrs
+        .iter()
+        .filter_map(|attr| {
+            if attr.path().is_ident("gen_doc") {
+                let meta_list = attr.meta.require_list().ok()?;
+                let expr = meta_list.parse_args::<Expr>().ok()?;
+                if let Expr::Assign(assign) = expr {
+                    if let Expr::Path(expr_path) = assign.left.as_ref() {
+                        let segment = expr_path.path.segments.first()?;
+                        if segment.ident == "ty" {
+                            if let Expr::Lit(expr_lit) = *assign.right {
+                                if let syn::Lit::Str(lit_str) = expr_lit.lit {
+                                    return Some(lit_str.value().trim().to_string());
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+            None
+        })
+        .collect::<Vec<String>>()
+        .join("")
+        .to_string()
+}
+
+pub fn doc(input: TokenStream) -> TokenStream {
+    let input = parse_macro_input!(input as DeriveInput);
+
+    let name = input.ident;
+
+    let variants = if let Data::Enum(data_enum) = input.data {
+        data_enum.variants
+    } else {
+        panic!("Doc can only be used on enums");
+    };
+
+    let match_arms = variants.iter().map(|variant| {
+        let variant_name = &variant.ident;
+        let docs = variant
+            .attrs
+            .iter()
+            .filter_map(|attr| {
+                if attr.path().is_ident("doc") {
+                    if let Meta::NameValue(value) = &attr.meta {
+                        if let Expr::Lit(lit) = &value.value {
+                            if let syn::Lit::Str(lit_str) = &lit.lit {
+                                return Some(lit_str.value().trim().to_string());
+                            }
+                        }
+                    }
+                }
+                None
+            })
+            .collect::<Vec<String>>()
+            .join("\n");
+
+        quote! {
+            #name::#variant_name => #docs.to_string(),
+        }
+    });
+
+    let match_arms_ty = variants.iter().map(|variant| {
+        let variant_name = &variant.ident;
+        let ty = extract_gen_doc_ty(&variant.attrs).to_lowercase();
+
+        let instance_type = match ty.as_str() {
+            "integer" => quote! { InstanceType::Integer },
+            "string" => quote! { InstanceType::String },
+            "object" => quote! { InstanceType::Integer },
+            _ => quote! { InstanceType::Null },
+        };
+
+        quote! {
+            #name::#variant_name => #instance_type,
+        }
+    });
+
+    let expanded = quote!
{ + impl #name { + pub fn doc(&self) -> String { + match self { + #(#match_arms)* + } + } + pub fn ty(&self) -> InstanceType { + match self { + #(#match_arms_ty)* + } + } + } + }; + + TokenStream::from(expanded) +} diff --git a/projects/ssddOnTop/macros/src/lib.rs b/projects/ssddOnTop/macros/src/lib.rs new file mode 100644 index 0000000..377a4ef --- /dev/null +++ b/projects/ssddOnTop/macros/src/lib.rs @@ -0,0 +1,54 @@ +extern crate proc_macro; + +use proc_macro::TokenStream; + +mod document_definition; +mod gen; +mod merge_right; +mod resolver; + +use crate::document_definition::{expand_directive_definition, expand_input_definition}; +use crate::merge_right::expand_merge_right_derive; +use crate::resolver::expand_resolver_derive; + +#[proc_macro_derive(MergeRight, attributes(merge_right))] +pub fn merge_right_derive(input: TokenStream) -> TokenStream { + expand_merge_right_derive(input) +} + +#[proc_macro_derive(DirectiveDefinition, attributes(directive_definition))] +pub fn directive_definitions_derive(input: TokenStream) -> TokenStream { + expand_directive_definition(input) +} + +#[proc_macro_derive(Doc, attributes(gen_doc))] +pub fn scalar_definition_derive(input: TokenStream) -> TokenStream { + gen::doc(input) +} + +#[proc_macro] +pub fn gen_doc(item: TokenStream) -> TokenStream { + let input = syn::parse_macro_input!(item as syn::DeriveInput); + let name = &input.ident; + let gen = quote::quote! { + impl #name { + pub fn doc() -> &'static str { + stringify!(#name) + } + } + }; + TokenStream::from(quote::quote! { + #input + #gen + }) +} + +#[proc_macro_derive(InputDefinition)] +pub fn input_definition_derive(input: TokenStream) -> TokenStream { + expand_input_definition(input) +} + +#[proc_macro_derive(CustomResolver)] +pub fn resolver_derive(input: TokenStream) -> TokenStream { + expand_resolver_derive(input) +} diff --git a/projects/ssddOnTop/macros/src/merge_right.rs b/projects/ssddOnTop/macros/src/merge_right.rs new file mode 100644 index 0000000..073f47b --- /dev/null +++ b/projects/ssddOnTop/macros/src/merge_right.rs @@ -0,0 +1,154 @@ +extern crate proc_macro; + +use proc_macro::TokenStream; +use quote::quote; +use syn::spanned::Spanned; +use syn::{parse_macro_input, Data, DeriveInput, Fields}; + +const MERGE_RIGHT_FN: &str = "merge_right_fn"; +const MERGE_RIGHT: &str = "merge_right"; + +#[derive(Default)] +struct Attrs { + merge_right_fn: Option, +} + +fn get_attrs(attrs: &[syn::Attribute]) -> syn::Result { + let mut attrs_ret = Attrs::default(); + for attr in attrs { + if attr.path().is_ident(MERGE_RIGHT) { + attr.parse_nested_meta(|meta| { + if meta.path.is_ident(MERGE_RIGHT_FN) { + let p: syn::Expr = meta.value()?.parse()?; + let lit = if let syn::Expr::Lit(syn::ExprLit { + lit: syn::Lit::Str(lit), + .. 
+ }) = p + { + let suffix = lit.suffix(); + if !suffix.is_empty() { + return Err(syn::Error::new( + lit.span(), + format!("unexpected suffix `{}` on string literal", suffix), + )); + } + lit + } else { + return Err(syn::Error::new( + p.span(), + format!( + "expected merge_right {} attribute to be a string.", + MERGE_RIGHT_FN + ), + )); + }; + let expr_path: syn::ExprPath = lit.parse()?; + attrs_ret.merge_right_fn = Some(expr_path); + Ok(()) + } else { + Err(syn::Error::new(attr.span(), "Unknown helper attribute.")) + } + })?; + } + } + Ok(attrs_ret) +} + +pub fn expand_merge_right_derive(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + + let name = input.ident.clone(); + let generics = input.generics.clone(); + let gen = match input.data { + // Implement for structs + Data::Struct(data) => { + let fields = if let Fields::Named(fields) = data.fields { + fields.named + } else { + // Adjust this match arm to handle other kinds of struct fields (unnamed/tuple + // structs, unit structs) + unimplemented!() + }; + + let merge_logic = fields.iter().map(|f| { + let attrs = get_attrs(&f.attrs); + if let Err(err) = attrs { + panic!("{}", err); + } + let attrs = attrs.unwrap(); + let name = &f.ident; + if let Some(merge_right_fn) = attrs.merge_right_fn { + quote! { + #name: #merge_right_fn(self.#name, other.#name), + } + } else { + quote! { + #name: self.#name.merge_right(other.#name), + } + } + }); + + let generics_lt = generics.lt_token; + let generics_gt = generics.gt_token; + let generics_params = generics.params; + + let generics_del = quote! { + #generics_lt #generics_params #generics_gt + }; + + quote! { + impl #generics_del MergeRight for #name #generics_del { + fn merge_right(self, other: Self) -> Self { + Self { + #(#merge_logic)* + } + } + } + } + } + // Implement for enums + Data::Enum(_) => quote! 
{ + impl MergeRight for #name { + fn merge_right(self, other: Self) -> Self { + other + } + } + }, + // Optionally handle or disallow unions + Data::Union(_) => { + return syn::Error::new_spanned(input, "Union types are not supported by MergeRight") + .to_compile_error() + .into() + } + }; + + gen.into() +} + +#[cfg(test)] +mod tests { + use syn::{parse_quote, Attribute}; + + use super::*; + + #[test] + fn test_get_attrs_invalid_type() { + let attrs: Vec = vec![parse_quote!(#[merge_right(merge_right_fn = 123)])]; + let result = get_attrs(&attrs); + assert!( + result.is_err(), + "Expected error with non-string type for `merge_right_fn`" + ); + } + + #[test] + fn test_get_attrs_unexpected_suffix() { + let attrs: Vec = + vec![parse_quote!(#[merge_right(merge_right_fn = "some_fn()")])]; + let result = get_attrs(&attrs); + assert!( + result.is_err(), + "Expected error with unexpected suffix on string literal" + ); + } +} diff --git a/projects/ssddOnTop/macros/src/resolver.rs b/projects/ssddOnTop/macros/src/resolver.rs new file mode 100644 index 0000000..7f120c9 --- /dev/null +++ b/projects/ssddOnTop/macros/src/resolver.rs @@ -0,0 +1,91 @@ +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, Data, DeriveInput, Fields}; + +pub fn expand_resolver_derive(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + let name = &input.ident; + + let variants = if let Data::Enum(data_enum) = &input.data { + data_enum + .variants + .iter() + .map(|variant| { + let variant_name = &variant.ident; + let ty = match &variant.fields { + Fields::Unnamed(fields) if fields.unnamed.len() == 1 => &fields.unnamed[0].ty, + _ => panic!("Resolver variants must have exactly one unnamed field"), + }; + + (variant_name, ty) + }) + .collect::>() + } else { + panic!("Resolver can only be derived for enums"); + }; + + let variant_parsers = variants.iter().map(|(variant_name, ty)| { + quote! { + valid = valid.and(<#ty>::from_directives(directives.iter()).map(|resolver| { + if let Some(resolver) = resolver { + let directive_name = <#ty>::trace_name(); + if !resolvable_directives.contains(&directive_name) { + resolvable_directives.push(directive_name); + } + result = Some(Self::#variant_name(resolver)); + } + })); + } + }); + + let match_arms_to_directive = variants.iter().map(|(variant_name, _ty)| { + quote! { + Self::#variant_name(d) => d.to_directive(), + } + }); + + let match_arms_directive_name = variants.iter().map(|(variant_name, ty)| { + quote! { + Self::#variant_name(_) => <#ty>::directive_name(), + } + }); + + let expanded = quote! 
{ + impl #name { + pub fn from_directives( + directives: &[Positioned], + ) -> anyhow::Result> { + let mut result = None; + let mut resolvable_directives = Vec::new(); + let mut valid = Ok(()); + + #(#variant_parsers)* + + valid.and_then(|_| { + if resolvable_directives.len() > 1 { + Err(anyhow::anyhow!( + "Multiple resolvers detected [{}]", + resolvable_directives.join(", ") + )) + } else { + Ok(result) + } + }) + } + + pub fn to_directive(&self) -> ConstDirective { + match self { + #(#match_arms_to_directive)* + } + } + + pub fn directive_name(&self) -> String { + match self { + #(#match_arms_directive_name)* + } + } + } + }; + + TokenStream::from(expanded) +} diff --git a/projects/ssddOnTop/run.sh b/projects/ssddOnTop/run.sh new file mode 100755 index 0000000..3db4ae1 --- /dev/null +++ b/projects/ssddOnTop/run.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +set -e + +cargo run -p ssddOnTop --release \ No newline at end of file diff --git a/projects/ssddOnTop/schema/schema.graphql b/projects/ssddOnTop/schema/schema.graphql new file mode 100644 index 0000000..7c6b866 --- /dev/null +++ b/projects/ssddOnTop/schema/schema.graphql @@ -0,0 +1,40 @@ +schema +@server(port: 8000) +@upstream(baseURL: "http://localhost:3000") { + query: Query +} + +type Query { + posts: [Post] @http(path: "/posts") + post(id: Int!): Post @http(path: "/posts/{{.args.id}}") + users: [User] @http(path: "/users") + user(id: Int!): User @http(path: "/users/{{.args.id}}") +} + +type Post { + id: Int + userId: Int! + title: String + body: String + user: User @http(path: "/users/{{.value.userId}}") +} + +type User { + id: Int + name: String + username: String + email: String + address: Address + phone: String + website: String +} + +type Address { + zipcode: String + geo: Geo +} + +type Geo { + lat: Float + lng: Float +} diff --git a/projects/ssddOnTop/src/app_ctx.rs b/projects/ssddOnTop/src/app_ctx.rs new file mode 100644 index 0000000..eecf49c --- /dev/null +++ b/projects/ssddOnTop/src/app_ctx.rs @@ -0,0 +1,15 @@ +use crate::blueprint::Blueprint; +use crate::target_runtime::TargetRuntime; +use std::sync::Arc; + +#[derive(Clone)] +pub struct AppCtx { + pub runtime: TargetRuntime, + pub blueprint: Arc, +} + +impl AppCtx { + pub fn new(runtime: TargetRuntime, blueprint: Arc) -> Self { + Self { runtime, blueprint } + } +} diff --git a/projects/ssddOnTop/src/blueprint/blueprint.rs b/projects/ssddOnTop/src/blueprint/blueprint.rs new file mode 100644 index 0000000..917d0ce --- /dev/null +++ b/projects/ssddOnTop/src/blueprint/blueprint.rs @@ -0,0 +1,213 @@ +use crate::blueprint::definitions::to_definitions; +use crate::blueprint::model::{Arg, ArgId, Field, FieldId, FieldName, TypeName}; +use crate::blueprint::wrapping_type::Type; +use crate::config::{Config, RootSchema}; +use crate::ir::IR; +use derive_setters::Setters; +use serde_json::Value; +use std::collections::HashMap; +use std::net::{IpAddr, SocketAddr}; + +#[derive(Debug, Eq, Hash, PartialEq)] +pub struct FieldHash { + pub name: FieldName, + pub id: TypeName, +} + +impl FieldHash { + pub fn new(name: FieldName, id: TypeName) -> Self { + Self { name, id } + } +} + +#[derive(Debug)] +pub struct Blueprint { + pub fields: HashMap, + pub server: Server, + pub upstream: Upstream, + pub schema: RootSchema, +} + +#[derive(Clone, Debug)] +pub struct Directive { + pub name: String, + pub arguments: HashMap, + pub index: usize, +} + +#[derive(Clone, Debug)] +pub enum Definition { + Interface(InterfaceTypeDefinition), + Object(ObjectTypeDefinition), + 
InputObject(InputObjectTypeDefinition),
+}
+
+#[derive(Clone, Debug)]
+pub struct InputObjectTypeDefinition {
+    pub name: String,
+    pub fields: Vec<InputFieldDefinition>,
+}
+
+#[derive(Clone, Debug)]
+pub struct ObjectTypeDefinition {
+    pub name: String,
+    pub fields: Vec<FieldDefinition>,
+}
+
+#[derive(Clone, Debug)]
+pub struct InterfaceTypeDefinition {
+    pub name: String,
+    pub fields: Vec<FieldDefinition>,
+}
+
+#[derive(Clone, Debug, Setters, Default)]
+pub struct FieldDefinition {
+    pub name: String,
+    pub args: Vec<InputFieldDefinition>,
+    pub of_type: Type,
+    pub resolver: Option<IR>,
+    pub directives: Vec<Directive>,
+}
+
+#[derive(Clone, Debug)]
+pub struct InputFieldDefinition {
+    pub name: String,
+    pub of_type: Type,
+}
+
+#[derive(Clone, Debug)]
+pub struct Server {
+    pub host: IpAddr,
+    pub port: u16,
+}
+
+impl Default for Server {
+    fn default() -> Self {
+        Self {
+            host: IpAddr::from([127, 0, 0, 1]),
+            port: 8000,
+        }
+    }
+}
+
+impl Server {
+    pub fn addr(&self) -> SocketAddr {
+        SocketAddr::new(self.host, self.port)
+    }
+}
+#[derive(Clone, Debug, Default)]
+pub struct Upstream {
+    pub base_url: Option<String>,
+    pub http_cache: u64,
+}
+
+impl TryFrom<&Config> for Blueprint {
+    type Error = anyhow::Error;
+
+    fn try_from(config: &Config) -> Result<Self, Self::Error> {
+        let qry = config
+            .schema
+            .query
+            .as_ref()
+            .ok_or(anyhow::anyhow!("Query not found"))?;
+        let defs = to_definitions(config)?;
+
+        let fields = fields_to_map(qry, config, defs);
+
+        let server = Server {
+            host: IpAddr::from([127, 0, 0, 1]),
+            port: config.server.port,
+        };
+        let upstream = Upstream {
+            base_url: config.upstream.base_url.clone(),
+            http_cache: config.upstream.http_cache.unwrap_or(10000),
+        };
+        Ok(Blueprint {
+            fields,
+            server,
+            upstream,
+            schema: config.schema.clone(),
+        })
+    }
+}
+
+fn fields_to_map(qry: &str, config: &Config, defs: Vec<Definition>) -> HashMap<FieldHash, Field> {
+    let mut fields = HashMap::new();
+    populate_nested_field(config, qry, &mut fields, &defs);
+    fields
+}
+
+fn populate_nested_field(
+    config: &Config,
+    ty_name: &str,
+    field_map: &mut HashMap<FieldHash, Field>,
+    defs: &[Definition],
+) {
+    // There is no additional check for scalars as of now..
+    // This should work fine
+    if let Some(ty) = config.types.get(ty_name) {
+        for (field_name, field) in ty.fields.iter() {
+            let field_name = FieldName(field_name.clone());
+            populate_nested_field(config, field.ty_of.name(), field_map, defs);
+            let field = Field {
+                name: field_name.clone(),
+                type_of: field.ty_of.clone(),
+                ir: {
+                    let x = defs.iter().find_map(|def| match def {
+                        Definition::Interface(int) => Some(
+                            int.fields
+                                .iter()
+                                .find(|f| field_name.0.eq(&f.name) && int.name.eq(ty_name))?
+                                .clone(),
+                        ),
+                        Definition::Object(obj) => Some(
+                            obj.fields
+                                .iter()
+                                .find(|f| field_name.0.eq(&f.name) && obj.name.eq(ty_name))?
+ .clone(), + ), + Definition::InputObject(_) => None, + }); + // println!("resolver for: {} is {:?}", field_name.0, x); + + x.and_then(|x| x.resolver.clone()) + }, + args: field + .args + .iter() + .map(|(arg_name, arg)| { + let arg = Arg { + name: arg_name.clone(), + type_of: arg.type_of.clone(), + }; + arg + }) + .collect(), + }; + + field_map.insert( + FieldHash { + name: field_name, + id: TypeName(ty_name.to_string()), + }, + field, + ); + } + } +} + +#[cfg(test)] +mod test { + use crate::config::ConfigReader; + + #[test] + fn test() { + let reader = ConfigReader::init(); + let root = env!("CARGO_MANIFEST_DIR"); + let config = reader + .read(format!("{}/schema/schema.graphql", root)) + .unwrap(); + let blueprint = crate::blueprint::Blueprint::try_from(&config).unwrap(); + println!("{:#?}", blueprint.fields ); + } +} diff --git a/projects/ssddOnTop/src/blueprint/definitions.rs b/projects/ssddOnTop/src/blueprint/definitions.rs new file mode 100644 index 0000000..6584fa1 --- /dev/null +++ b/projects/ssddOnTop/src/blueprint/definitions.rs @@ -0,0 +1,120 @@ +use crate::blueprint::operators::http::update_http; +use crate::blueprint::{ + Definition, FieldDefinition, InputFieldDefinition, InputObjectTypeDefinition, + InterfaceTypeDefinition, ObjectTypeDefinition, +}; +use crate::config; +use crate::config::Config; + +pub fn to_definitions(config: &Config) -> anyhow::Result> { + let mut definitions = vec![]; + for (ty_name, ty) in config.types.iter() { + let def = + to_object_type_definition(ty_name, ty, config).map( + |definition| match definition { + Definition::Object(_) => { + definition + /*if config.input_types().contains(ty_name) { + to_input_object_type_definition(object_type_definition) + } else if config.interface_types().contains(ty_name) { + to_interface_type_definition(object_type_definition) + } else { + Ok(definition) + }*/ + } + _ => definition, + }, + )?; + definitions.push(def); + } + Ok(definitions) +} + +fn to_interface_type_definition(definition: ObjectTypeDefinition) -> anyhow::Result { + Ok(Definition::Interface(InterfaceTypeDefinition { + name: definition.name, + fields: definition.fields, + })) +} + +fn to_input_object_type_definition(definition: ObjectTypeDefinition) -> anyhow::Result { + Ok(Definition::InputObject(InputObjectTypeDefinition { + name: definition.name, + fields: definition + .fields + .iter() + .map(|field| InputFieldDefinition { + name: field.name.clone(), + of_type: field.of_type.clone(), + }) + .collect(), + })) +} + +fn to_object_type_definition( + name: &str, + type_of: &config::Type1, + config_module: &Config, +) -> anyhow::Result { + to_fields(name, type_of, config_module).map(|fields| { + Definition::Object(ObjectTypeDefinition { + name: name.to_string(), + fields, + }) + }) +} + +fn to_fields( + name: &str, + ty: &config::Type1, + config: &Config, +) -> anyhow::Result> { + if !config.types.contains_key(name) { + // assume it's a scalar + return Ok(vec![]); + } + + let mut fields = vec![]; + for (field_name, field) in ty.fields.iter() { + to_field_definition(field_name, field, config).map(|field| { + fields.push(field); + })?; + } + + Ok(fields) +} + +fn to_field_definition( + field_name: &str, + field: &config::Field, + config: &Config, +) -> anyhow::Result { + let mut def = FieldDefinition::default(); + def = update_args(field_name, field.clone(), def); + def = update_http(field, config, def)?; + Ok(def) +} + +fn update_args( + field_name: &str, + field: config::Field, + mut def: FieldDefinition, +) -> FieldDefinition { + let args = field + 
.args + .iter() + .map(|(name, arg)| { + + InputFieldDefinition { + name: name.clone(), + of_type: arg.type_of.clone(), + } + }) + .collect::>(); + + def.name = field_name.to_string(); + def.args = args; + def.of_type = field.ty_of.clone(); + + def +} diff --git a/projects/ssddOnTop/src/blueprint/mod.rs b/projects/ssddOnTop/src/blueprint/mod.rs new file mode 100644 index 0000000..459b235 --- /dev/null +++ b/projects/ssddOnTop/src/blueprint/mod.rs @@ -0,0 +1,7 @@ +mod blueprint; +mod definitions; +pub mod model; +mod operators; +pub mod wrapping_type; + +pub use blueprint::*; diff --git a/projects/ssddOnTop/src/blueprint/model.rs b/projects/ssddOnTop/src/blueprint/model.rs new file mode 100644 index 0000000..f3fbdf3 --- /dev/null +++ b/projects/ssddOnTop/src/blueprint/model.rs @@ -0,0 +1,66 @@ +use crate::ir::IR; +use std::fmt::{Debug, Formatter}; + +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub struct FieldName(pub String); + +impl AsRef for FieldName { + fn as_ref(&self) -> &str { + &self.0 + } +} + +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub struct TypeName(pub String); + +#[derive(Clone)] +pub struct ArgId(usize); + +impl Debug for ArgId { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +impl ArgId { + pub fn new(id: usize) -> Self { + ArgId(id) + } +} + +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct FieldId(usize); + +impl Debug for FieldId { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +impl FieldId { + pub fn new(id: usize) -> Self { + FieldId(id) + } + pub fn as_usize(&self) -> usize { + self.0 + } +} + +#[derive(Clone, Debug)] +pub struct Arg { + pub name: String, + pub type_of: crate::blueprint::wrapping_type::Type, +} + +#[derive(Clone, Debug)] +pub struct Nested(Vec); +#[derive(Clone, Debug)] +pub struct Flat(FieldId); + +#[derive(Clone, Debug)] +pub struct Field { + pub name: FieldName, + pub type_of: crate::blueprint::wrapping_type::Type, + pub ir: Option, + pub args: Vec, +} diff --git a/projects/ssddOnTop/src/blueprint/operators/http.rs b/projects/ssddOnTop/src/blueprint/operators/http.rs new file mode 100644 index 0000000..a174140 --- /dev/null +++ b/projects/ssddOnTop/src/blueprint/operators/http.rs @@ -0,0 +1,62 @@ +use crate::blueprint::FieldDefinition; +use crate::config; +use crate::config::Resolver; +use crate::endpoint::Endpoint; +use crate::http::method::Method; +use crate::http::RequestTemplate; +use crate::ir::{IO, IR}; + +fn compile_http(config_module: &config::Config, http: &config::Http) -> anyhow::Result { + let mut base_url = String::new(); + if let Some(base) = http.base_url.clone() { + base_url = base; + } else if let Some(base) = config_module.upstream.base_url.clone() { + base_url = base; + } else { + return Err(anyhow::anyhow!("No base URL defined")); + } + let mut base_url = base_url.trim_end_matches('/').to_owned(); + base_url.push_str(http.path.clone().as_str()); + + let query = http + .query + .clone() + .iter() + .map(|key_value| { + ( + key_value.key.clone(), + key_value.value.clone(), + key_value.skip_empty.unwrap_or_default(), + ) + }) + .collect(); + + let req_template = RequestTemplate::try_from( + Endpoint::new(base_url.to_string()) + .method(http.method.clone()) + .query(query), + )?; + + let ir = if http.method == Method::GET { + IR::IO(IO::Http { req_template }) + } else { + IR::IO(IO::Http { req_template }) + }; + + Ok(ir) +} + +pub fn update_http( + field: &config::Field, + config: &config::Config, + mut def: FieldDefinition, 
+) -> anyhow::Result { + let Some(Resolver::Http(http)) = field.resolver.as_ref() else { + return Ok(def); + }; + + let ir = compile_http(config, http)?; + def.resolver = Some(ir); + // TODO: Validate + Ok(def) +} diff --git a/projects/ssddOnTop/src/blueprint/operators/mod.rs b/projects/ssddOnTop/src/blueprint/operators/mod.rs new file mode 100644 index 0000000..3883215 --- /dev/null +++ b/projects/ssddOnTop/src/blueprint/operators/mod.rs @@ -0,0 +1 @@ +pub mod http; diff --git a/projects/ssddOnTop/src/blueprint/wrapping_type.rs b/projects/ssddOnTop/src/blueprint/wrapping_type.rs new file mode 100644 index 0000000..87c9792 --- /dev/null +++ b/projects/ssddOnTop/src/blueprint/wrapping_type.rs @@ -0,0 +1,199 @@ +use std::fmt::Formatter; +use std::ops::Deref; + +use async_graphql::parser::types as async_graphql_types; +use async_graphql::Name; +use serde::{Deserialize, Serialize}; + +use crate::is_default; + +/// Type to represent GraphQL type usage with modifiers +/// [spec](https://spec.graphql.org/October2021/#sec-Wrapping-Types) +#[derive(Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(untagged)] +pub enum Type { + Named { + /// Name of the type + name: String, + /// Flag to indicate the type is required. + #[serde(rename = "required", default, skip_serializing_if = "is_default")] + non_null: bool, + }, + List { + /// Type is a list + #[serde(rename = "list")] + of_type: Box, + /// Flag to indicate the type is required. + #[serde(rename = "required", default, skip_serializing_if = "is_default")] + non_null: bool, + }, +} + +impl std::fmt::Debug for Type { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Type::Named { name, non_null } => { + if *non_null { + write!(f, "{}!", name) + } else { + write!(f, "{}", name) + } + } + Type::List { of_type, non_null } => { + if *non_null { + write!(f, "[{:?}]!", of_type) + } else { + write!(f, "[{:?}]", of_type) + } + } + } + } +} + +impl Default for Type { + fn default() -> Self { + Type::Named { + name: "JSON".to_string(), + non_null: false, + } + } +} + +impl Type { + /// gets the name of the type + pub fn name(&self) -> &String { + match self { + Type::Named { name, .. } => name, + Type::List { of_type, .. } => of_type.name(), + } + } + + /// checks if the type is nullable + pub fn is_nullable(&self) -> bool { + !match self { + Type::Named { non_null, .. } => *non_null, + Type::List { non_null, .. } => *non_null, + } + } + /// checks if the type is a list + pub fn is_list(&self) -> bool { + matches!(self, Type::List { .. }) + } + + /// convert this type into NonNull type + pub fn into_required(self) -> Self { + match self { + Type::Named { name, .. } => Self::Named { + name, + non_null: true, + }, + Type::List { of_type, .. } => Self::List { + of_type, + non_null: true, + }, + } + } + + /// convert this into nullable type + pub fn into_nullable(self) -> Self { + match self { + Type::Named { name, .. } => Self::Named { + name, + non_null: false, + }, + Type::List { of_type, .. } => Self::List { + of_type, + non_null: false, + }, + } + } + + /// create a nullable list type from this type + pub fn into_list(self) -> Self { + Type::List { + of_type: Box::new(self), + non_null: false, + } + } + + /// convert this type from list to non-list for any level of nesting + pub fn into_single(self) -> Self { + match self { + Type::Named { .. } => self, + Type::List { of_type, .. 
} => of_type.into_single(), + } + } + + /// replace the name of the underlying type + pub fn with_name(self, name: String) -> Self { + match self { + Type::Named { non_null, .. } => Type::Named { name, non_null }, + Type::List { of_type, non_null } => Type::List { + of_type: Box::new(of_type.with_name(name)), + non_null, + }, + } + } +} + +impl From<&async_graphql_types::Type> for Type { + fn from(value: &async_graphql_types::Type) -> Self { + let non_null = !value.nullable; + + match &value.base { + async_graphql_types::BaseType::Named(name) => Self::Named { + name: name.to_string(), + non_null, + }, + async_graphql_types::BaseType::List(type_) => Self::List { + of_type: Box::new(type_.as_ref().into()), + non_null, + }, + } + } +} + +impl From<&Type> for async_graphql_types::Type { + fn from(value: &Type) -> Self { + let nullable = value.is_nullable(); + + let base = match value { + Type::Named { name, .. } => async_graphql_types::BaseType::Named(Name::new(name)), + Type::List { of_type, .. } => async_graphql_types::BaseType::List(Box::new( + async_graphql_types::Type::from(of_type.deref()), + )), + }; + + async_graphql_types::Type { base, nullable } + } +} + +impl From<&Type> for async_graphql::dynamic::TypeRef { + fn from(value: &Type) -> Self { + let nullable = value.is_nullable(); + + let base = match value { + Type::Named { name, .. } => { + async_graphql::dynamic::TypeRef::Named(name.to_owned().into()) + } + Type::List { of_type, .. } => async_graphql::dynamic::TypeRef::List(Box::new( + async_graphql::dynamic::TypeRef::from(of_type.deref()), + )), + }; + + if nullable { + base + } else { + async_graphql::dynamic::TypeRef::NonNull(Box::new(base)) + } + } +} + +impl From for Type { + fn from(value: String) -> Self { + Self::Named { + name: value, + non_null: false, + } + } +} diff --git a/projects/ssddOnTop/src/cache.rs b/projects/ssddOnTop/src/cache.rs new file mode 100644 index 0000000..1fc9004 --- /dev/null +++ b/projects/ssddOnTop/src/cache.rs @@ -0,0 +1,201 @@ +use http_cache_reqwest::{CacheManager, HttpResponse}; +use http_cache_semantics::CachePolicy; +use moka::future::Cache; +use moka::policy::EvictionPolicy; +use serde::{Deserialize, Serialize}; +pub type BoxError = Box; +pub type Result = std::result::Result; +use std::sync::Arc; + +pub struct HttpCacheManager { + pub cache: Arc>, +} + +impl Default for HttpCacheManager { + fn default() -> Self { + Self::new(42) + } +} + +#[derive(Clone, Deserialize, Serialize)] +pub struct Store { + response: HttpResponse, + policy: CachePolicy, +} + +impl HttpCacheManager { + pub fn new(cache_size: u64) -> Self { + let cache = Cache::builder() + .eviction_policy(EvictionPolicy::lru()) + .max_capacity(cache_size) + .build(); + Self { + cache: Arc::new(cache), + } + } + + pub async fn clear(&self) -> Result<()> { + self.cache.invalidate_all(); + self.cache.run_pending_tasks().await; + Ok(()) + } +} + +#[async_trait::async_trait] +impl CacheManager for HttpCacheManager { + async fn get(&self, cache_key: &str) -> Result> { + let store: Store = match self.cache.get(cache_key).await { + Some(d) => d, + None => return Ok(None), + }; + Ok(Some((store.response, store.policy))) + } + + async fn put( + &self, + cache_key: String, + response: HttpResponse, + policy: CachePolicy, + ) -> Result { + let data = Store { + response: response.clone(), + policy, + }; + self.cache.insert(cache_key, data).await; + self.cache.run_pending_tasks().await; + Ok(response) + } + + async fn delete(&self, cache_key: &str) -> Result<()> { + 
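+        // Invalidate the entry, then flush moka's pending maintenance tasks so the
+        // removal is visible as soon as `delete` returns.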
self.cache.invalidate(cache_key).await; + self.cache.run_pending_tasks().await; + Ok(()) + } +} + +/*#[cfg(test)] +mod tests { + use std::collections::HashMap; + use std::str::FromStr; + use http::Response; + use http_cache::HttpVersion; + use reqwest::{Method, ResponseBuilderExt, StatusCode}; + use reqwest::header::HeaderMap; + use url::Url; + + use super::*; + + impl http_cache_semantics::ResponseLike for http::Response { + fn status(&self) -> StatusCode { + StatusCode::from_str(self.status().as_str()).unwrap() + } + + fn headers(&self) -> &HeaderMap { + todo!() + } + } + + fn convert_response(response: HttpResponse) -> Result> { + let ret_res = http::Response::builder() + .status(response.status) + .version( + match response.version { + HttpVersion::Http09 => http::Version::HTTP_09, + HttpVersion::Http10 => http::Version::HTTP_10, + HttpVersion::Http11 => http::Version::HTTP_11, + HttpVersion::H2 => http::Version::HTTP_2, + HttpVersion::H3 => http::Version::HTTP_3, + _ => unreachable!() + } + ) + .body(response.body)?; + let (parts, body) = ret_res.into_parts(); + + Ok(Response::from_parts(parts, bytes::Bytes::from(body))) + } + + async fn insert_key_into_cache(manager: &HttpCacheManager, key: &str) { + let request_url = "http://localhost:8080/test"; + let url = Url::parse(request_url).unwrap(); + + let http_resp = HttpResponse { + headers: HashMap::default(), + body: vec![1, 2, 3], + status: 200, + url: url.clone(), + version: HttpVersion::Http11, + }; + let resp = convert_response(http_resp.clone()).unwrap(); + let request: reqwest::Request = + reqwest::Request::new(Method::GET, request_url.parse().unwrap()); + + let _ = manager + .put( + key.to_string(), + http_resp, + CachePolicy::new(&request, &resp), + ) + .await + .unwrap(); + } + + #[tokio::test] + async fn test_put() { + let manager = HttpCacheManager::default(); + insert_key_into_cache(&manager, "test").await; + assert!(manager.cache.contains_key("test")); + } + + #[tokio::test] + async fn test_get_when_key_present() { + let manager = HttpCacheManager::default(); + insert_key_into_cache(&manager, "test").await; + let value = manager.get("test").await.unwrap(); + assert!(value.is_some()); + } + + #[tokio::test] + async fn test_get_when_key_not_present() { + let manager = HttpCacheManager::default(); + let result = manager.get("test").await.unwrap(); + assert!(result.is_none()); + } + + #[tokio::test] + async fn test_delete_when_key_present() { + let manager = HttpCacheManager::default(); + insert_key_into_cache(&manager, "test").await; + + assert!(manager.cache.iter().count() as i32 == 1); + let _ = manager.delete("test").await; + assert!(manager.cache.iter().count() as i32 == 0); + } + + #[tokio::test] + async fn test_clear() { + let manager = HttpCacheManager::default(); + insert_key_into_cache(&manager, "test").await; + assert!(manager.cache.iter().count() as i32 == 1); + let _ = manager.clear().await; + assert!(manager.cache.iter().count() as i32 == 0); + } + + #[tokio::test] + async fn test_lru_eviction_policy() { + let manager = HttpCacheManager::new(2); + insert_key_into_cache(&manager, "test-1").await; + insert_key_into_cache(&manager, "test-2").await; + insert_key_into_cache(&manager, "test-10").await; + + let res = manager.get("test-1").await.unwrap(); + assert!(res.is_none()); + + let res = manager.get("test-2").await.unwrap(); + assert!(res.is_some()); + + let res = manager.get("test-10").await.unwrap(); + assert!(res.is_some()); + + assert_eq!(manager.cache.entry_count(), 2); + } +}*/ diff --git 
a/projects/ssddOnTop/src/config/config.rs b/projects/ssddOnTop/src/config/config.rs new file mode 100644 index 0000000..48ce7f5 --- /dev/null +++ b/projects/ssddOnTop/src/config/config.rs @@ -0,0 +1,133 @@ +use crate::blueprint::wrapping_type; +use crate::config::url_query::URLQuery; +use crate::directive::DirectiveCodec; +use crate::http::method::Method; +use crate::is_default; +use async_graphql::parser::types::ConstDirective; +use async_graphql::Positioned; +use macros::{DirectiveDefinition, InputDefinition}; +use serde::{Deserialize, Serialize}; +use std::collections::BTreeMap; +use std::num::NonZeroU64; + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct Config { + pub types: BTreeMap, + pub upstream: Upstream, + pub server: Server, + pub schema: RootSchema, +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct RootSchema { + pub query: Option, + #[serde(default, skip_serializing_if = "is_default")] + pub mutation: Option, + #[serde(default, skip_serializing_if = "is_default")] + pub subscription: Option, +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct Server { + #[serde(default, skip_serializing_if = "is_default")] + pub port: u16, +} + +impl Default for Server { + fn default() -> Self { + Server { port: 8000 } + } +} + +#[derive(Serialize, Deserialize, Clone, Debug, Default)] +pub struct Upstream { + #[serde(rename = "baseURL", default, skip_serializing_if = "is_default")] + pub base_url: Option, + #[serde(default, skip_serializing_if = "is_default")] + pub http_cache: Option, +} + +// TODO: rename +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Default)] +pub struct Type1 { + pub fields: BTreeMap, + pub cache: Option, +} + +impl Type1 { + pub fn fields(mut self, fields: Vec<(&str, Field)>) -> Self { + let mut graphql_fields = BTreeMap::new(); + for (name, field) in fields { + graphql_fields.insert(name.to_string(), field); + } + self.fields = graphql_fields; + self + } + + pub fn scalar(&self) -> bool { + self.fields.is_empty() + } +} + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] +pub struct Cache { + pub max_age: NonZeroU64, +} + +#[derive( + Serialize, + Deserialize, + Clone, + Debug, + PartialEq, + Eq, + schemars::JsonSchema, + macros::CustomResolver, +)] +pub enum Resolver { + Http(Http), +} + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Default)] +pub struct Field { + pub ty_of: wrapping_type::Type, + #[serde(flatten, default, skip_serializing_if = "is_default")] + pub resolver: Option, + pub args: BTreeMap, +} + +#[derive( + Serialize, + Deserialize, + Clone, + Debug, + PartialEq, + Eq, + schemars::JsonSchema, + DirectiveDefinition, + InputDefinition, +)] +#[directive_definition(locations = "FieldDefinition")] +pub struct Http { + pub path: String, + #[serde(default, skip_serializing_if = "is_default")] + pub method: Method, + #[serde(rename = "baseURL", default, skip_serializing_if = "is_default")] + pub base_url: Option, + #[serde(default, skip_serializing_if = "is_default")] + pub query: Vec, +} + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] +pub struct Arg { + pub type_of: wrapping_type::Type, + pub default_value: Option, +} + +#[derive(Default, Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum GraphQLOperationType { + #[default] + Query, + Mutation, +} diff --git a/projects/ssddOnTop/src/config/kv.rs b/projects/ssddOnTop/src/config/kv.rs new file mode 100644 index 0000000..47086f1 --- /dev/null +++ 
b/projects/ssddOnTop/src/config/kv.rs @@ -0,0 +1,7 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Clone, Debug, Default, Eq, PartialEq)] +pub struct KeyValue { + pub key: String, + pub value: String, +} diff --git a/projects/ssddOnTop/src/config/mod.rs b/projects/ssddOnTop/src/config/mod.rs new file mode 100644 index 0000000..9d63f4b --- /dev/null +++ b/projects/ssddOnTop/src/config/mod.rs @@ -0,0 +1,8 @@ +mod config; +mod kv; +mod reader; +mod url_query; + +pub use config::*; +pub use kv::*; +pub use reader::*; diff --git a/projects/ssddOnTop/src/config/reader.rs b/projects/ssddOnTop/src/config/reader.rs new file mode 100644 index 0000000..e1fb510 --- /dev/null +++ b/projects/ssddOnTop/src/config/reader.rs @@ -0,0 +1,32 @@ +// Just a reader.. nothing special here + +use crate::config::Config; +use std::path::Path; + +pub struct ConfigReader {} + +impl ConfigReader { + pub fn init() -> Self { + Self {} + } + pub fn read>(&self, path: T) -> anyhow::Result { + let sdl = std::fs::read_to_string(path)?; + let doc = async_graphql::parser::parse_schema(sdl)?; + crate::from_doc::from_doc(doc) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_read() { + let root = env!("CARGO_MANIFEST_DIR"); + let reader = ConfigReader::init(); + let config = reader + .read(format!("{}/schema/schema.graphql", root)) + .unwrap(); + assert_eq!(config.types.len(), 5); + } +} diff --git a/projects/ssddOnTop/src/config/url_query.rs b/projects/ssddOnTop/src/config/url_query.rs new file mode 100644 index 0000000..2c0a72f --- /dev/null +++ b/projects/ssddOnTop/src/config/url_query.rs @@ -0,0 +1,18 @@ +use serde::{Deserialize, Serialize}; + +use crate::is_default; + +#[derive(Serialize, Deserialize, Clone, Debug, Default, Eq, PartialEq, schemars::JsonSchema)] +#[serde(rename_all = "camelCase")] +/// The URLQuery input type represents a query parameter to be included in a +/// URL. +pub struct URLQuery { + /// The key or name of the query parameter. + pub key: String, + /// The actual value or a mustache template to resolve the value dynamically + /// for the query parameter. + pub value: String, + #[serde(default, skip_serializing_if = "is_default")] + /// Determines whether to ignore query parameters with empty values. 
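+    /// When true, a query pair whose value template resolves to nothing is
+    /// omitted from the URL instead of being rendered as a bare key.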
+ pub skip_empty: Option, +} diff --git a/projects/ssddOnTop/src/directive.rs b/projects/ssddOnTop/src/directive.rs new file mode 100644 index 0000000..327b4ce --- /dev/null +++ b/projects/ssddOnTop/src/directive.rs @@ -0,0 +1,147 @@ +use crate::blueprint; +use async_graphql::parser::types::ConstDirective; +use async_graphql::{Name, Pos, Positioned}; +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Value}; +use serde_path_to_error::deserialize; + +fn pos(a: A) -> Positioned { + Positioned::new(a, Pos::default()) +} + +#[allow(dead_code)] +fn to_const_directive(directive: &blueprint::Directive) -> anyhow::Result { + let mut arguments = Vec::new(); + for (k, v) in directive.arguments.iter() { + let name = pos(Name::new(k.clone())); + let x = serde_json::from_value(v.clone()) + .map(pos) + .map(|value| (name, value))?; + arguments.push(x); + } + Ok(ConstDirective { + name: pos(Name::new(directive.name.clone())), + arguments, + }) +} + +pub trait DirectiveCodec: Sized { + fn directive_name() -> String; + fn from_directive(directive: &ConstDirective) -> anyhow::Result; + fn to_directive(&self) -> ConstDirective; + fn trace_name() -> String { + format!("@{}", Self::directive_name()) + } + fn from_directives<'a>( + directives: impl Iterator>, + ) -> anyhow::Result> { + for directive in directives { + if directive.node.name.node == Self::directive_name() { + return Self::from_directive(&directive.node).map(Some); + } + } + Ok(None) + } +} +fn lower_case_first_letter(s: &str) -> String { + if s.len() <= 2 { + s.to_lowercase() + } else if let Some(first_char) = s.chars().next() { + first_char.to_string().to_lowercase() + &s[first_char.len_utf8()..] + } else { + s.to_string() + } +} + +impl<'a, A: Deserialize<'a> + Serialize + 'a> DirectiveCodec for A { + fn directive_name() -> String { + lower_case_first_letter( + std::any::type_name::() + .split("::") + .last() + .unwrap_or_default(), + ) + } + + fn from_directive(directive: &ConstDirective) -> anyhow::Result { + let mut map = Map::new(); + for (k, v) in directive.arguments.iter() { + let (k, v) = serde_json::to_value(&v.node).map(|v| (k.node.as_str().to_string(), v))?; + map.insert(k, v); + } + Ok(deserialize(Value::Object(map))?) 
+ /* Valid::from_iter(directive.arguments.iter(), |(k, v)| { + Valid::from(serde_json::to_value(&v.node).map_err(|e| { + ValidationError::new(e.to_string()) + .trace(format!("@{}", directive.name.node).as_str()) + })) + .map(|v| (k.node.as_str().to_string(), v)) + }) + .map(|items| { + items.iter().fold(Map::new(), |mut map, (k, v)| { + map.insert(k.clone(), v.clone()); + map + }) + }) + .and_then(|map| match deserialize(Value::Object(map)) { + Ok(a) => Valid::succeed(a), + Err(e) => Valid::from_validation_err( + ValidationError::from(e).trace(format!("@{}", directive.name.node).as_str()), + ), + })*/ + } + + fn to_directive(&self) -> ConstDirective { + let name = Self::directive_name(); + let value = serde_json::to_value(self).unwrap(); + let default_map = &Map::new(); + let map = value.as_object().unwrap_or(default_map); + + let mut arguments = Vec::new(); + for (k, v) in map { + arguments.push(( + pos(Name::new(k.clone())), + pos(serde_json::from_value(v.to_owned()).unwrap()), + )); + } + + ConstDirective { + name: pos(Name::new(name)), + arguments, + } + } +} + +#[cfg(test)] +mod tests { + + use crate::blueprint::Directive; + use crate::directive::{pos, to_const_directive}; + use async_graphql::parser::types::ConstDirective; + use async_graphql_value::Name; + use pretty_assertions::assert_eq; + + #[test] + fn test_to_const_directive() { + let directive = Directive { + name: "test".to_string(), + arguments: vec![("a".to_string(), serde_json::json!(1.0))] + .into_iter() + .collect(), + index: 0, + }; + + let const_directive: ConstDirective = to_const_directive(&directive).unwrap(); + let expected_directive: ConstDirective = ConstDirective { + name: pos(Name::new("test")), + arguments: vec![(pos(Name::new("a")), pos(async_graphql::Value::from(1.0)))] + .into_iter() + .collect(), + }; + + assert_eq!( + format!("{:?}", const_directive), + format!("{:?}", expected_directive) + ); + } +} diff --git a/projects/ssddOnTop/src/dl/dedupe.rs b/projects/ssddOnTop/src/dl/dedupe.rs new file mode 100644 index 0000000..959b6f2 --- /dev/null +++ b/projects/ssddOnTop/src/dl/dedupe.rs @@ -0,0 +1,420 @@ +use std::collections::HashMap; +use std::hash::Hash; +use std::sync::{Arc, Mutex, Weak}; + +use futures_util::Future; +use tokio::sync::broadcast; + +pub trait Key: Send + Sync + Eq + Hash + Clone {} +impl Key for A {} + +pub trait Value: Send + Sync + Clone {} +impl Value for A {} + +/// +/// Allows deduplication of async operations based on a key. +pub struct Dedupe { + /// Cache storage for the operations. + cache: Arc>>>, + /// Initial size of the multi-producer, multi-consumer channel. + size: usize, + /// When enabled allows the operations to be cached forever. + persist: bool, +} + +/// Represents the current state of the operation. +enum State { + /// Means that the operation has been executed and the result is stored. + Ready(Value), + + /// Means that the operation is in progress and the result can be sent via + /// the stored sender whenever it's available in the future. + Pending(Weak>), +} + +/// Represents the next steps +enum Step { + /// The operation has been executed and the result must be returned. + Return(Value), + + /// The operation is in progress and the result must be awaited on the + /// receiver. + Await(broadcast::Receiver), + + /// The operation needs to be executed and the result needs to be sent to + /// the provided sender. 
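+    /// The returned sender is the only strong reference; if the initiating task
+    /// is dropped, waiting receivers see a closed channel and retry on their own.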
+ Init(Arc>), +} + +impl Dedupe { + pub fn new(size: usize, persist: bool) -> Self { + Self { + cache: Arc::new(Mutex::new(HashMap::new())), + size, + persist, + } + } + + pub async fn dedupe<'a, Fn, Fut>(&'a self, key: &'a K, or_else: Fn) -> V + where + Fn: FnOnce() -> Fut, + Fut: Future, + { + loop { + let value = match self.step(key) { + Step::Return(value) => value, + Step::Await(mut rx) => match rx.recv().await { + Ok(value) => value, + Err(_) => { + // If we get an error that means the task with + // owned tx (sender) was dropped.i.e. there is no result in cache + // and we can try another attempt because probably another + // task will do the execution + continue; + } + }, + Step::Init(tx) => { + let value = or_else().await; + let mut guard = self.cache.lock().unwrap(); + if self.persist { + guard.insert(key.to_owned(), State::Ready(value.clone())); + } else { + guard.remove(key); + } + let _ = tx.send(value.clone()); + value + } + }; + + return value; + } + } + + fn step(&self, key: &K) -> Step { + let mut this = self.cache.lock().unwrap(); + + if let Some(state) = this.get(key) { + match state { + State::Ready(value) => return Step::Return(value.clone()), + State::Pending(tx) => { + // We can upgrade from Weak to Arc only in case when + // original tx is still alive + // otherwise we will create in the code below + if let Some(tx) = tx.upgrade() { + return Step::Await(tx.subscribe()); + } + } + } + } + + let (tx, _) = broadcast::channel(self.size); + let tx = Arc::new(tx); + // Store a Weak version of tx and pass actual tx to further handling + // to control if tx is still alive and will be able to handle the request. + // Only single `strong` reference to tx should exist so we can + // understand when the execution is still alive and we'll get the response + this.insert(key.to_owned(), State::Pending(Arc::downgrade(&tx))); + Step::Init(tx) + } +} + +pub struct DedupeResult(Dedupe>); + +impl DedupeResult { + pub fn new(persist: bool) -> Self { + Self(Dedupe::new(1, persist)) + } +} + +impl DedupeResult { + pub async fn dedupe<'a, Fn, Fut>(&'a self, key: &'a K, or_else: Fn) -> Result + where + Fn: FnOnce() -> Fut, + Fut: Future>, + { + self.0.dedupe(key, or_else).await + } +} + +#[cfg(test)] +mod tests { + use std::ops::Deref; + use std::sync::atomic::{AtomicUsize, Ordering}; + use std::time::Duration; + + use assert_eq; + use tokio::join; + use tokio::time::{sleep, timeout_at, Instant}; + + use super::*; + + #[tokio::test] + async fn test_no_key() { + let cache = Arc::new(Dedupe::::new(1000, true)); + let actual = cache.dedupe(&1, || Box::pin(async { 1 })).await; + assert_eq!(actual, 1); + } + + #[tokio::test] + async fn test_with_key() { + let cache = Arc::new(Dedupe::::new(1000, true)); + cache.dedupe(&1, || Box::pin(async { 1 })).await; + + let actual = cache.dedupe(&1, || Box::pin(async { 2 })).await; + assert_eq!(actual, 1); + } + + #[tokio::test] + async fn test_with_multi_get() { + let cache = Arc::new(Dedupe::::new(1000, true)); + + for i in 0..100 { + cache.dedupe(&1, || Box::pin(async move { i })).await; + } + + let actual = cache.dedupe(&1, || Box::pin(async { 2 })).await; + assert_eq!(actual, 0); + } + + #[tokio::test] + async fn test_with_multi_async_get() { + let cache = Arc::new(Dedupe::::new(1000, true)); + + let a = cache.dedupe(&1, || { + Box::pin(async move { + sleep(Duration::from_millis(1)).await; + 1 + }) + }); + let b = cache.dedupe(&1, || { + Box::pin(async move { + sleep(Duration::from_millis(1)).await; + 2 + }) + }); + let (a, b) = join!(a, b); + + 
assert_eq!(a, b); + } + + async fn compute_value(counter: Arc) -> String { + counter.fetch_add(1, Ordering::SeqCst); + sleep(Duration::from_millis(1)).await; + format!("value_{}", counter.load(Ordering::SeqCst)) + } + + #[tokio::test(worker_threads = 16, flavor = "multi_thread")] + async fn test_deadlock_scenario() { + let _ = tracing_subscriber::fmt(); + let cache = Arc::new(Dedupe::::new(1000, true)); + let key = 1; + let counter = Arc::new(AtomicUsize::new(0)); + let mut handles = Vec::new(); + + // Spawn multiple tasks to simulate concurrent access + for i in 0..1000000 { + let cache = cache.clone(); + let counter = counter.clone(); + let handle = tokio::task::spawn(async move { + let result = cache + .dedupe(&key, || Box::pin(compute_value(counter))) + .await; + (i, result) + }); + handles.push(handle); + } + // Await each task for any potential deadlocks + for handle in handles.into_iter() { + let _ = handle.await.unwrap(); + } + // Check that compute_value was called exactly once + assert_eq!( + counter.load(Ordering::SeqCst), + 1, + "compute_value was called more than once" + ); + } + + #[tokio::test] + async fn test_hanging_after_dropped() { + let cache = Arc::new(Dedupe::::new(100, true)); + + let task = cache.dedupe(&1, move || async move { + sleep(Duration::from_millis(100)).await; + }); + + // drops the task since the underlying sleep timeout is higher than the + // timeout here + + timeout_at(Instant::now() + Duration::from_millis(10), task) + .await + .expect_err("Should throw timeout error"); + + cache + .dedupe(&1, move || async move { + sleep(Duration::from_millis(100)).await; + }) + .await; + } + + #[tokio::test] + async fn test_hanging_dropped_while_in_use() { + let cache = Arc::new(Dedupe::::new(100, true)); + let cache_1 = cache.clone(); + let cache_2 = cache.clone(); + + let task_1 = tokio::spawn(async move { + cache_1 + .dedupe(&1, move || async move { + sleep(Duration::from_millis(100)).await; + 100 + }) + .await + }); + + let task_2 = tokio::spawn(async move { + cache_2 + .dedupe(&1, move || async move { + sleep(Duration::from_millis(100)).await; + 200 + }) + .await + }); + + sleep(Duration::from_millis(10)).await; + + // drop the first task + task_1.abort(); + + let actual = task_2.await.unwrap(); + assert_eq!(actual, 200) + } + + // TODO: This is a failing test + #[tokio::test] + #[ignore] + async fn test_should_not_abort_call_1() { + #[derive(Debug, PartialEq, Clone)] + struct Status { + // Set this in the first call + call_1: bool, + + // Set this in the second call + call_2: bool, + } + + let status = Arc::new(Mutex::new(Status { + call_1: false, + call_2: false, + })); + + let cache = Arc::new(Dedupe::::new(100, true)); + let cache_1 = cache.clone(); + let cache_2 = cache.clone(); + let status_1 = status.clone(); + let status_2 = status.clone(); + + // Task 1 completed in 100ms + let task_1 = tokio::spawn(async move { + cache_1 + .dedupe(&1, move || async move { + sleep(Duration::from_millis(100)).await; + status_1.lock().unwrap().call_1 = true; + }) + .await + }); + + // Wait for 10ms + sleep(Duration::from_millis(10)).await; + + // Task 2 completed in 200ms + tokio::spawn(async move { + cache_2 + .dedupe(&1, move || async move { + sleep(Duration::from_millis(120)).await; + status_2.lock().unwrap().call_2 = true; + }) + .await + }); + + // Wait for 10ms + sleep(Duration::from_millis(10)).await; + + // Abort the task_1 + task_1.abort(); + + sleep(Duration::from_millis(300)).await; + + // Task 1 should still have completed because others are dependent on it. 
+ let actual = status.lock().unwrap().deref().to_owned(); + assert_eq!( + actual, + Status { + call_1: true, + call_2: false + } + ) + } + + #[tokio::test] + async fn test_should_abort_all() { + #[derive(Debug, PartialEq, Clone)] + struct Status { + // Set this in the first call + call_1: bool, + + // Set this in the second call + call_2: bool, + } + + let status = Arc::new(Mutex::new(Status { + call_1: false, + call_2: false, + })); + + let cache = Arc::new(Dedupe::::new(100, true)); + let cache_1 = cache.clone(); + let cache_2 = cache.clone(); + let status_1 = status.clone(); + let status_2 = status.clone(); + + // Task 1 completed in 100ms + let task_1 = tokio::spawn(async move { + cache_1 + .dedupe(&1, move || async move { + sleep(Duration::from_millis(100)).await; + status_1.lock().unwrap().call_1 = true; + }) + .await + }); + + // Task 2 completed in 150ms + let task_2 = tokio::spawn(async move { + cache_2 + .dedupe(&1, move || async move { + sleep(Duration::from_millis(150)).await; + status_2.lock().unwrap().call_2 = true; + }) + .await + }); + + // Wait for 10ms + sleep(Duration::from_millis(50)).await; + + // Abort the task_1 & task_2 + task_1.abort(); + task_2.abort(); + + sleep(Duration::from_millis(300)).await; + + // No task should have completed + let actual = status.lock().unwrap().deref().to_owned(); + assert_eq!( + actual, + Status { + call_1: false, + call_2: false + } + ) + } +} diff --git a/projects/ssddOnTop/src/dl/mod.rs b/projects/ssddOnTop/src/dl/mod.rs new file mode 100644 index 0000000..f60f1a8 --- /dev/null +++ b/projects/ssddOnTop/src/dl/mod.rs @@ -0,0 +1 @@ +pub mod dedupe; diff --git a/projects/ssddOnTop/src/endpoint.rs b/projects/ssddOnTop/src/endpoint.rs new file mode 100644 index 0000000..1935cba --- /dev/null +++ b/projects/ssddOnTop/src/endpoint.rs @@ -0,0 +1,19 @@ +use crate::http::method::Method; +use derive_setters::Setters; + +#[derive(Clone, Debug, Setters)] +pub struct Endpoint { + pub path: String, + pub query: Vec<(String, String, bool)>, + pub method: Method, +} + +impl Endpoint { + pub fn new(url: String) -> Endpoint { + Self { + path: url, + query: Default::default(), + method: Default::default(), + } + } +} diff --git a/projects/ssddOnTop/src/from_doc.rs b/projects/ssddOnTop/src/from_doc.rs new file mode 100644 index 0000000..73bc0e9 --- /dev/null +++ b/projects/ssddOnTop/src/from_doc.rs @@ -0,0 +1,268 @@ +use crate::config::{Arg, Cache, Config, Field, Resolver, RootSchema, Server, Type1, Upstream}; +use crate::directive::DirectiveCodec; +use anyhow::Result; +use async_graphql::parser::types::{ + ConstDirective, FieldDefinition, InputObjectType, InputValueDefinition, InterfaceType, + ObjectType, SchemaDefinition, ServiceDocument, Type, TypeDefinition, TypeKind, + TypeSystemDefinition, +}; +use async_graphql::{Name, Positioned}; +use std::collections::BTreeMap; + +const DEFAULT_SCHEMA_DEFINITION: &SchemaDefinition = &SchemaDefinition { + extend: false, + directives: Vec::new(), + query: None, + mutation: None, + subscription: None, +}; + +pub fn from_doc(doc: ServiceDocument) -> Result { + let type_definitions: Vec<_> = doc + .definitions + .iter() + .filter_map(|def| match def { + TypeSystemDefinition::Type(td) => Some(td), + _ => None, + }) + .collect(); + + let types = to_types(&type_definitions)?; + let sd = schema_definition(&doc)?; + let server = server(sd)?; + let upstream = upstream(sd)?; + let schema = schema_definition(&doc).map(to_root_schema)?; + + Ok(Config { + types, + upstream, + server, + schema, + }) +} +fn 
to_root_schema(schema_definition: &SchemaDefinition) -> RootSchema { + let query = schema_definition.query.as_ref().map(pos_name_to_string); + let mutation = schema_definition.mutation.as_ref().map(pos_name_to_string); + let subscription = schema_definition + .subscription + .as_ref() + .map(pos_name_to_string); + + RootSchema { + query, + mutation, + subscription, + } +} +fn upstream(schema_definition: &SchemaDefinition) -> Result { + process_schema_directives(schema_definition, Upstream::directive_name().as_str()) +} + +fn schema_definition(doc: &ServiceDocument) -> Result<&SchemaDefinition> { + doc.definitions + .iter() + .find_map(|def| match def { + TypeSystemDefinition::Schema(schema_definition) => Some(&schema_definition.node), + _ => None, + }) + .map_or_else(|| Ok(DEFAULT_SCHEMA_DEFINITION), Ok) +} + +fn server(schema_definition: &SchemaDefinition) -> Result { + process_schema_directives(schema_definition, Server::directive_name().as_str()) +} + +fn process_schema_directives( + schema_definition: &SchemaDefinition, + directive_name: &str, +) -> Result { + let mut res = Ok(T::default()); + for directive in schema_definition.directives.iter() { + if directive.node.name.node.as_ref() == directive_name { + res = T::from_directive(&directive.node); + } + } + res +} + +fn pos_name_to_string(pos: &Positioned) -> String { + pos.node.to_string() +} + +fn to_types( + type_definitions: &Vec<&Positioned>, +) -> Result> { + let mut map = BTreeMap::new(); + for type_definition in type_definitions.iter() { + let type_name = pos_name_to_string(&type_definition.node.name); + let ty = match type_definition.node.kind.clone() { + TypeKind::Object(object_type) => { + to_object_type(&object_type, &type_definition.node.directives) + } + TypeKind::Interface(interface_type) => { + to_object_type(&interface_type, &type_definition.node.directives) + } + TypeKind::InputObject(input_object_type) => to_input_object(input_object_type), + _ => Err(anyhow::anyhow!( + "Unsupported type kind: {:?}", + type_definition.node.kind + )), + }?; + + map.insert(type_name, ty); + } + + Ok(map) +} + +fn to_input_object(input_object_type: InputObjectType) -> Result { + let fields = to_input_object_fields(&input_object_type.fields)?; + Ok(Type1 { + fields, + ..Default::default() + }) +} + +fn to_input_object_fields( + input_object_fields: &Vec>, +) -> Result> { + to_fields_inner(input_object_fields, to_input_object_field) +} + +fn to_input_object_field(field_definition: &InputValueDefinition) -> Result { + to_common_field(field_definition, BTreeMap::new()) +} + +fn to_object_type(object: &T, directives: &[Positioned]) -> Result +where + T: ObjectLike, +{ + let fields = object.fields(); + + let cache = Cache::from_directives(directives.iter())?; + let fields = to_fields(fields)?; + Ok(Type1 { fields, cache }) +} + +fn to_fields(fields: &Vec>) -> Result> { + to_fields_inner(fields, to_field) +} + +fn to_fields_inner( + fields: &Vec>, + transform: F, +) -> Result> +where + F: Fn(&T) -> Result, + T: HasName, +{ + let mut map = BTreeMap::new(); + for field in fields.iter() { + let field_name = pos_name_to_string(field.node.name()); + let (name, field) = transform(&field.node).map(|field| (field_name, field))?; + map.insert(name, field); + } + + Ok(map) +} + +fn to_field(field_definition: &FieldDefinition) -> Result { + to_common_field(field_definition, to_args(field_definition)) +} + +fn to_common_field(field: &F, args: BTreeMap) -> Result +where + F: FieldLike + HasName, +{ + let type_of = field.type_of(); + let directives = 
field.directives(); + + let resolver = Resolver::from_directives(directives)?; + Ok(Field { + ty_of: type_of.into(), + args, + resolver, + }) +} + +fn to_args(field_definition: &FieldDefinition) -> BTreeMap { + let mut args = BTreeMap::new(); + + for arg in field_definition.arguments.iter() { + let arg_name = pos_name_to_string(&arg.node.name); + let arg_val = to_arg(&arg.node); + args.insert(arg_name, arg_val); + } + + args +} + +fn to_arg(input_value_definition: &InputValueDefinition) -> Arg { + let type_of = &input_value_definition.ty.node; + + let default_value = if let Some(pos) = input_value_definition.default_value.as_ref() { + let value = &pos.node; + serde_json::to_value(value).ok() + } else { + None + }; + Arg { + type_of: type_of.into(), + default_value, + } +} + +trait HasName { + fn name(&self) -> &Positioned; +} +impl HasName for FieldDefinition { + fn name(&self) -> &Positioned { + &self.name + } +} +impl HasName for InputValueDefinition { + fn name(&self) -> &Positioned { + &self.name + } +} + +trait FieldLike { + fn type_of(&self) -> &Type; + fn description(&self) -> &Option>; + fn directives(&self) -> &[Positioned]; +} +impl FieldLike for FieldDefinition { + fn type_of(&self) -> &Type { + &self.ty.node + } + fn description(&self) -> &Option> { + &self.description + } + fn directives(&self) -> &[Positioned] { + &self.directives + } +} +impl FieldLike for InputValueDefinition { + fn type_of(&self) -> &Type { + &self.ty.node + } + fn description(&self) -> &Option> { + &self.description + } + fn directives(&self) -> &[Positioned] { + &self.directives + } +} +trait ObjectLike { + fn fields(&self) -> &Vec>; +} +impl ObjectLike for ObjectType { + fn fields(&self) -> &Vec> { + &self.fields + } +} +impl ObjectLike for InterfaceType { + fn fields(&self) -> &Vec> { + &self.fields + } +} diff --git a/projects/ssddOnTop/src/hasher.rs b/projects/ssddOnTop/src/hasher.rs new file mode 100644 index 0000000..f157570 --- /dev/null +++ b/projects/ssddOnTop/src/hasher.rs @@ -0,0 +1,17 @@ +use fxhash::FxHasher; +use std::hash::Hasher; + +#[derive(Default)] +pub struct MyHasher { + hasher: FxHasher, +} + +impl Hasher for MyHasher { + fn finish(&self) -> u64 { + self.hasher.finish() + } + + fn write(&mut self, bytes: &[u8]) { + self.hasher.write(bytes) + } +} diff --git a/projects/ssddOnTop/src/helpers/headers.rs b/projects/ssddOnTop/src/helpers/headers.rs new file mode 100644 index 0000000..f28d827 --- /dev/null +++ b/projects/ssddOnTop/src/helpers/headers.rs @@ -0,0 +1,62 @@ +use crate::config::KeyValue; +use crate::mustache::model::Mustache; +use anyhow::Result; +use reqwest::header::HeaderName; + +pub type MustacheHeaders = Vec<(HeaderName, Mustache)>; + +pub fn to_mustache_headers(headers: &[KeyValue]) -> Result { + let mut ans = vec![]; + for key_value in headers { + let name = HeaderName::from_bytes(key_value.key.as_bytes())?; + let value = Mustache::parse(key_value.value.as_str()); + let header = (name, value); + ans.push(header); + } + Ok(ans) +} + +#[cfg(test)] +mod tests { + use super::to_mustache_headers; + use crate::config::KeyValue; + use crate::mustache::model::Mustache; + use anyhow::Result; + use reqwest::header::HeaderName; + + #[test] + fn valid_headers() -> Result<()> { + let input: Vec = serde_json::from_str( + r#"[{"key": "a", "value": "str"}, {"key": "b", "value": "123"}]"#, + )?; + + let headers = to_mustache_headers(&input)?; + + assert_eq!( + headers, + vec![ + (HeaderName::from_bytes(b"a")?, Mustache::parse("str")), + (HeaderName::from_bytes(b"b")?, 
Mustache::parse("123")) + ] + ); + + Ok(()) + } + + #[test] + fn not_valid_due_to_utf8() { + let input: Vec = + serde_json::from_str(r#"[{"key": "😅", "value": "str"}, {"key": "b", "value": "🦀"}]"#) + .unwrap(); + let error = to_mustache_headers(&input).unwrap_err(); + + // HeaderValue should be parsed just fine despite non-visible ascii symbols + // range see https://github.com/hyperium/http/issues/519 + assert_eq!( + error.to_string(), + r"Validation Error +• invalid HTTP header name [😅] +" + ); + } +} diff --git a/projects/ssddOnTop/src/helpers/mod.rs b/projects/ssddOnTop/src/helpers/mod.rs new file mode 100644 index 0000000..7b67a24 --- /dev/null +++ b/projects/ssddOnTop/src/helpers/mod.rs @@ -0,0 +1 @@ +pub mod headers; diff --git a/projects/ssddOnTop/src/http/headers.rs b/projects/ssddOnTop/src/http/headers.rs new file mode 100644 index 0000000..c563141 --- /dev/null +++ b/projects/ssddOnTop/src/http/headers.rs @@ -0,0 +1,38 @@ +use std::str::FromStr; + +#[derive(Clone, Debug, Default)] +pub struct HeaderMap(hyper::header::HeaderMap); + +impl From for hyper::header::HeaderMap { + fn from(val: HeaderMap) -> Self { + val.0 + } +} + +/*impl Into for HeaderMap { + fn into(self) -> reqwest::header::HeaderMap { + let mut map = reqwest::header::HeaderMap::new(); + for (k, v) in self.0.iter() { + map.insert(k.as_str().parse().unwrap(),v.as_bytes().to_vec().into()); + } + map + } +}*/ + +impl From for HeaderMap { + fn from(value: http::HeaderMap) -> Self { + Self(value) + } +} +impl From for HeaderMap { + fn from(value: reqwest::header::HeaderMap) -> Self { + let mut map = hyper::header::HeaderMap::new(); + for (k, v) in value.iter() { + map.insert( + hyper::header::HeaderName::from_str(k.as_str()).unwrap(), + hyper::header::HeaderValue::from_str(v.to_str().unwrap()).unwrap(), + ); + } + Self(map) + } +} diff --git a/projects/ssddOnTop/src/http/method.rs b/projects/ssddOnTop/src/http/method.rs new file mode 100644 index 0000000..95242cc --- /dev/null +++ b/projects/ssddOnTop/src/http/method.rs @@ -0,0 +1,73 @@ +use serde::{Deserialize, Serialize}; +use strum_macros::Display; + +#[derive( + Clone, + Debug, + Serialize, + Deserialize, + PartialEq, + Eq, + Hash, + Default, + Display, + schemars::JsonSchema, +)] +pub enum Method { + #[default] + GET, + POST, + PUT, + PATCH, + DELETE, + HEAD, + OPTIONS, + CONNECT, + TRACE, +} + +impl From for Method { + fn from(value: hyper::Method) -> Self { + match value { + hyper::Method::GET => Method::GET, + hyper::Method::POST => Method::POST, + hyper::Method::PUT => Method::PUT, + hyper::Method::PATCH => Method::PATCH, + hyper::Method::DELETE => Method::DELETE, + hyper::Method::HEAD => Method::HEAD, + hyper::Method::OPTIONS => Method::OPTIONS, + hyper::Method::CONNECT => Method::CONNECT, + hyper::Method::TRACE => Method::TRACE, + _ => unreachable!(), + } + } +} + +impl Method { + pub fn into_reqwest(self) -> reqwest::Method { + match self { + Method::GET => reqwest::Method::GET, + Method::POST => reqwest::Method::POST, + Method::PUT => reqwest::Method::PUT, + Method::PATCH => reqwest::Method::PATCH, + Method::DELETE => reqwest::Method::DELETE, + Method::HEAD => reqwest::Method::HEAD, + Method::OPTIONS => reqwest::Method::OPTIONS, + Method::CONNECT => reqwest::Method::CONNECT, + Method::TRACE => reqwest::Method::TRACE, + } + } + pub fn to_hyper(self) -> hyper::Method { + match self { + Method::GET => hyper::Method::GET, + Method::POST => hyper::Method::POST, + Method::PUT => hyper::Method::PUT, + Method::PATCH => hyper::Method::PATCH, + Method::DELETE => 
hyper::Method::DELETE, + Method::HEAD => hyper::Method::HEAD, + Method::OPTIONS => hyper::Method::OPTIONS, + Method::CONNECT => hyper::Method::CONNECT, + Method::TRACE => hyper::Method::TRACE, + } + } +} diff --git a/projects/ssddOnTop/src/http/mod.rs b/projects/ssddOnTop/src/http/mod.rs new file mode 100644 index 0000000..d926908 --- /dev/null +++ b/projects/ssddOnTop/src/http/mod.rs @@ -0,0 +1,9 @@ +mod headers; +pub mod method; +mod query_encoder; +mod req_template; +pub mod request; +pub mod request_handler; +pub mod response; + +pub use req_template::*; diff --git a/projects/ssddOnTop/src/http/query_encoder.rs b/projects/ssddOnTop/src/http/query_encoder.rs new file mode 100644 index 0000000..0ee47e5 --- /dev/null +++ b/projects/ssddOnTop/src/http/query_encoder.rs @@ -0,0 +1,257 @@ +use crate::path::ValueString; + +/// Defines different strategies for encoding query parameters. +#[derive(Default, Debug, Clone)] +pub enum QueryEncoder { + /// Encodes the query list as key=value1,value2,value3,... + CommaSeparated, + /// Encodes the query list by repeating the key for each value: + /// key=value1&key=value2&key=value3&... + #[default] + RepeatedKey, +} + +impl QueryEncoder { + pub fn encode(&self, key: &str, raw_value: Option) -> String { + if let Some(value) = raw_value { + match &value { + ValueString::Value(val) => self.encode_const_value(key, val.serde()), + ValueString::String(val) => format!("{}={}", key, val), + } + } else { + key.to_owned() + } + } + fn encode_const_value(&self, key: &str, value: &serde_json::Value) -> String { + match self { + QueryEncoder::CommaSeparated => match value { + serde_json::Value::Array(list) if !list.is_empty() => { + let encoded_values: Vec = + list.iter().filter_map(convert_value).collect(); + + if encoded_values.is_empty() { + key.to_string() + } else { + format!("{}={}", key, encoded_values.join(",")) + } + } + _ => convert_value(value) + .map(|val| format!("{}={}", key, val)) + .unwrap_or(key.to_string()), + }, + QueryEncoder::RepeatedKey => match value { + serde_json::Value::Array(list) if !list.is_empty() => { + let encoded_values: Vec = list + .iter() + .map(|val| self.encode_const_value(key, val)) + .collect(); + if encoded_values.is_empty() { + key.to_string() + } else { + encoded_values.join("&") + } + } + _ => convert_value(value) + .map(|val| format!("{}={}", key, val)) + .unwrap_or(key.to_string()), + }, + } + } +} + +pub fn convert_value(value: &serde_json::Value) -> Option { + match value { + serde_json::Value::String(s) => Some(s.to_string()), + serde_json::Value::Number(n) => Some(n.to_string()), + serde_json::Value::Bool(b) => Some(b.to_string()), + _ => None, + } +} + +#[cfg(test)] +mod tests { + use std::borrow::Cow; + + use serde_json::Value; + + use super::*; + + #[test] + fn test_encode_comma_separated_arg() { + let encoder = QueryEncoder::CommaSeparated; + let values = Value::Array(vec![ + Value::Number(12.into()), + Value::Number(42.into()), + Value::Number(13.into()), + ]); + let binding = crate::value::Value::new(values); + let arg_raw_value = Some(ValueString::Value(Cow::Borrowed(&binding))); + + let actual = encoder.encode("key", arg_raw_value); + let expected = "key=12,42,13".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_repeated_key_value_arg() { + let encoder = QueryEncoder::RepeatedKey; + let values = Value::Array(vec![ + Value::Number(12.into()), + Value::Number(42.into()), + Value::Number(13.into()), + ]); + let binding = crate::value::Value::new(values); + let arg_raw_value = 
Some(ValueString::Value(Cow::Borrowed(&binding))); + + let actual = encoder.encode("key", arg_raw_value); + let expected = "key=12&key=42&key=13".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_env_var() { + let encoder = QueryEncoder::default(); + let raw_value = Some(ValueString::String("env_value".into())); + + let actual = encoder.encode("key", raw_value); + let expected = "key=env_value".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_var() { + let encoder = QueryEncoder::default(); + let raw_value = Some(ValueString::String("var_value".into())); + + let actual = encoder.encode("key", raw_value); + let expected = "key=var_value".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_none() { + let encoder = QueryEncoder::default(); + let raw_value: Option = None; + + let actual = encoder.encode("key", raw_value); + let expected = "key".to_owned(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_comma_separated_strategy() { + let key = "ids"; + let values = Value::Array(vec![ + Value::String("1".to_string()), + Value::String("2".to_string()), + Value::String("3".to_string()), + ]); + let strategy = QueryEncoder::CommaSeparated; + + let actual = strategy.encode_const_value(key, &values); + let expected = "ids=1,2,3".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_repeated_key_strategy() { + let key = "ids"; + let values = Value::Array(vec![ + Value::String("1".to_string()), + Value::String("2".to_string()), + Value::String("3".to_string()), + ]); + let strategy = QueryEncoder::RepeatedKey; + + let actual = strategy.encode_const_value(key, &values); + let expected = "ids=1&ids=2&ids=3".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_mixed_values_comma_separated() { + let key = "values"; + let values = Value::Array(vec![ + Value::String("string".to_string()), + Value::Number(42.into()), + Value::Bool(true), + ]); + let strategy = QueryEncoder::CommaSeparated; + + let actual = strategy.encode_const_value(key, &values); + let expected = "values=string,42,true".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_mixed_values_repeated_key() { + let key = "values"; + let values = Value::Array(vec![ + Value::String("string".to_string()), + Value::Number(42.into()), + Value::Bool(true), + ]); + let strategy = QueryEncoder::RepeatedKey; + + let actual = strategy.encode_const_value(key, &values); + let expected = "values=string&values=42&values=true".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_empty_list_comma_separated() { + let key = "empty"; + let values = Value::Array(vec![]); + let strategy = QueryEncoder::CommaSeparated; + + let actual = strategy.encode_const_value(key, &values); + let expected = "empty".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_empty_list_repeated_key() { + let key = "empty"; + let values = Value::Array(vec![]); + let strategy = QueryEncoder::RepeatedKey; + + let actual = strategy.encode_const_value(key, &values); + let expected = "empty".to_string(); + + assert_eq!(actual, expected); + } + + #[test] + fn test_encode_single_value_comma_separated() { + let key = "single"; + let values = Value::Array(vec![Value::String("value".to_string())]); + let strategy = QueryEncoder::CommaSeparated; + + let actual = strategy.encode_const_value(key, &values); + let expected = "single=value".to_string(); + + 
assert_eq!(actual, expected); + } + + #[test] + fn test_encode_single_value_repeated_key() { + let key = "single"; + let values = Value::Array(vec![Value::String("value".to_string())]); + let strategy = QueryEncoder::RepeatedKey; + + let actual = strategy.encode_const_value(key, &values); + let expected = "single=value".to_string(); + + assert_eq!(actual, expected); + } +} diff --git a/projects/ssddOnTop/src/http/req_template.rs b/projects/ssddOnTop/src/http/req_template.rs new file mode 100644 index 0000000..8cb1905 --- /dev/null +++ b/projects/ssddOnTop/src/http/req_template.rs @@ -0,0 +1,210 @@ +use crate::endpoint::Endpoint; +use crate::hasher::MyHasher; +use crate::http::query_encoder::QueryEncoder; +use crate::ir::eval_ctx::EvalContext; +use crate::ir::IoId; +use crate::mustache::model::{Mustache, Segment}; +use crate::path::{PathString, PathValue, ValueString}; +use crate::value::Value; +use hyper::Method; +use std::borrow::Cow; +use std::hash::{Hash, Hasher}; +use url::Url; + +#[derive(Debug, Clone)] +pub struct RequestTemplate { + pub root_url: Mustache, + pub query: Vec, + pub method: Method, + pub query_encoder: QueryEncoder, + // pub headers: MustacheHeaders, +} + +#[derive(Debug, Clone)] +pub struct Query { + pub key: String, + pub value: Mustache, + pub skip_empty: bool, +} + +impl TryFrom for RequestTemplate { + type Error = anyhow::Error; + fn try_from(endpoint: Endpoint) -> anyhow::Result { + let path = Mustache::parse(endpoint.path.as_str()); + let query = endpoint + .query + .iter() + .map(|(k, v, skip)| { + Ok(Query { + key: k.as_str().to_string(), + value: Mustache::parse(v.as_str()), + skip_empty: *skip, + }) + }) + .collect::>>()?; + let method = endpoint.method.clone().to_hyper(); + /* let headers = endpoint + .headers + .iter() + .map(|(k, v)| Ok((k.clone(), Mustache::parse(v.to_str()?)))) + .collect::>>()?;*/ + + Ok(Self { + root_url: path, + query, + method, + // headers, + query_encoder: Default::default(), + }) + } +} + +impl RequestTemplate { + pub fn cache_key(&self, ctx: &EvalContext) -> IoId { + let mut hasher = MyHasher::default(); + let state = &mut hasher; + + self.method.hash(state); + + /* for (name, value) in ctx.headers().iter() { + name.hash(state); + value.hash(state); + }*/ + + let url = self.create_url(ctx).unwrap(); + url.hash(state); + + IoId::new(hasher.finish()) + } +} + +struct ValueStringEval(std::marker::PhantomData); +impl Default for ValueStringEval { + fn default() -> Self { + Self(std::marker::PhantomData) + } +} + +impl<'a, A: PathValue> ValueStringEval { + fn eval(&self, mustache: &Mustache, in_value: &'a A) -> Option> { + mustache + .segments() + .iter() + .filter_map(|segment| match segment { + Segment::Literal(text) => Some(ValueString::Value(Cow::Owned(Value::new( + serde_json::Value::String(text.to_owned()), + )))), + Segment::Expression(parts) => in_value.raw_value(parts), + }) + .next() // Return the first value that is found + } +} + +impl RequestTemplate { + /// Creates a URL for the context + /// Fills in all the mustache templates with required values. + fn create_url(&self, ctx: &C) -> anyhow::Result { + let mut url = url::Url::parse(self.root_url.render(ctx).as_str())?; + if self.query.is_empty() && self.root_url.is_const() { + return Ok(url); + } + + // evaluates mustache template and returns the values evaluated by mustache + // template. 
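+        // For example, a query entry whose value template resolves to 42 under the
+        // key `id` contributes the pair `id=42`; an entry that resolves to nothing is
+        // dropped when `skip_empty` is set, or emitted as a bare `id` otherwise.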
+ let mustache_eval = ValueStringEval::default(); + + let extra_qp = self.query.iter().filter_map(|query| { + let key = &query.key; + let value = &query.value; + let skip = query.skip_empty; + let parsed_value = mustache_eval.eval(value, ctx); + if skip && parsed_value.is_none() { + None + } else { + Some(self.query_encoder.encode(key, parsed_value)) + } + }); + + let base_qp = url + .query_pairs() + .filter_map(|(k, v)| if v.is_empty() { None } else { Some((k, v)) }); + + let qp_string = base_qp.map(|(k, v)| format!("{}={}", k, v)); + let qp_string = qp_string.chain(extra_qp).fold("".to_string(), |str, item| { + if str.is_empty() { + item + } else if item.is_empty() { + str + } else { + format!("{}&{}", str, item) + } + }); + + if qp_string.is_empty() { + url.set_query(None); + Ok(url) + } else { + url.set_query(Some(qp_string.as_str())); + Ok(url) + } + } + + /// Checks if the template has any mustache templates or not + /// Returns true if there are not templates + pub fn is_const(&self) -> bool { + self.root_url.is_const() && self.query.iter().all(|query| query.value.is_const()) + } + + /// Creates a Request for the given context + pub fn to_request( + &self, + ctx: &C, + ) -> anyhow::Result { + // Create url + let url = self.create_url(ctx)?; + let method = self.method.clone(); + let req = reqwest::Request::new( + crate::http::method::Method::from(method).into_reqwest(), + url, + ); + // req = self.set_headers(req, ctx); + // req = self.set_body(req, ctx)?; + + Ok(req) + } + + /* /// Sets the headers for the request + fn set_headers( + &self, + mut req: reqwest::Request, + ctx: &C, + ) -> reqwest::Request { + let headers = self.create_headers(ctx); + if !headers.is_empty() { + req.headers_mut().extend(headers); + } + + let headers = req.headers_mut(); + // We want to set the header value based on encoding + // TODO: potential of optimizations. 
+ // Can set content-type headers while creating the request template + if self.method != reqwest::Method::GET { + headers.insert( + reqwest::header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + } + + headers.extend(ctx.headers().to_owned()); + req + }*/ + + pub fn new(root_url: &str) -> anyhow::Result { + Ok(Self { + root_url: Mustache::parse(root_url), + query: Default::default(), + method: Method::GET, + query_encoder: Default::default(), + }) + } +} diff --git a/projects/ssddOnTop/src/http/request.rs b/projects/ssddOnTop/src/http/request.rs new file mode 100644 index 0000000..bc9227e --- /dev/null +++ b/projects/ssddOnTop/src/http/request.rs @@ -0,0 +1,24 @@ +use crate::http::method::Method; +use http_body_util::BodyExt; +use hyper::body::Incoming; + +pub struct Request { + pub method: Method, + pub url: hyper::Uri, + pub headers: hyper::HeaderMap, + pub body: bytes::Bytes, +} + +impl Request { + pub async fn from_hyper(req: hyper::Request) -> anyhow::Result { + let (part, body) = req.into_parts(); + let body = body.collect().await?.to_bytes(); + + Ok(Self { + method: Method::from(part.method), + url: part.uri, + headers: part.headers, + body, + }) + } +} diff --git a/projects/ssddOnTop/src/http/request_handler.rs b/projects/ssddOnTop/src/http/request_handler.rs new file mode 100644 index 0000000..d775baf --- /dev/null +++ b/projects/ssddOnTop/src/http/request_handler.rs @@ -0,0 +1,56 @@ +use crate::app_ctx::AppCtx; +use crate::blueprint::model::{FieldName, TypeName}; +use crate::blueprint::FieldHash; +use crate::http::method::Method; +use crate::http::request::Request; +use crate::ir::eval_ctx::EvalContext; +use crate::request_context::RequestContext; +use crate::value::Value; +use bytes::Bytes; +use http_body_util::Full; +use crate::jit::model::PathFinder; + +pub async fn handle_request( + req: Request, + app_ctx: AppCtx, +) -> anyhow::Result>> { + let resp = match req.method { + Method::GET => hyper::Response::builder() + .status(hyper::StatusCode::OK) + .body(Full::new(Bytes::from(async_graphql::http::GraphiQLSource::build().finish())))?, + Method::POST => handle_gql_req(req, app_ctx).await?, + _ => hyper::Response::builder() + .status(hyper::StatusCode::METHOD_NOT_ALLOWED) + .body(Full::new(Bytes::from_static(b"Method Not Allowed")))?, + }; + Ok(resp) +} + +fn create_request_context(app_ctx: &AppCtx) -> RequestContext { + RequestContext::from(app_ctx) +} + +async fn handle_gql_req( + request: Request, + app_ctx: AppCtx, +) -> anyhow::Result>> { + let gql_req: async_graphql::Request = serde_json::from_slice(&request.body)?; + let doc = async_graphql::parser::parse_query(&gql_req.query)?; + let req_ctx = create_request_context(&app_ctx); + if let Some(_) = app_ctx.blueprint.schema.query.as_ref() { + let eval_ctx = EvalContext::new(&req_ctx); + let path_finder = PathFinder::new(doc, &app_ctx.blueprint); + let fields = path_finder.exec().await; + let borrowed_fields = fields.to_borrowed(); + + let resolved = fields.resolve(eval_ctx).await?; + let borrowed_val = resolved.to_borrowed(); + Ok(hyper::Response::new(Full::new(Bytes::from( + borrowed_val.finalize().to_string(), + )))) + } else { + Ok(hyper::Response::new(Full::new(Bytes::from_static( + b"Only queries are suppored", + )))) + } +} diff --git a/projects/ssddOnTop/src/http/response.rs b/projects/ssddOnTop/src/http/response.rs new file mode 100644 index 0000000..776894e --- /dev/null +++ b/projects/ssddOnTop/src/http/response.rs @@ -0,0 +1,128 @@ +use crate::http::headers::HeaderMap; +use anyhow::Result; 
+use async_graphql_value::{ConstValue, Name}; +use derive_setters::Setters; +use http::StatusCode; +use http_body_util::{BodyExt, Full}; +use hyper::body::Bytes; + +#[derive(Clone, Debug, Default, Setters)] +pub struct Response { + pub status: StatusCode, + pub headers: HeaderMap, + pub body: Body, +} + +// Trait to convert a serde_json_borrow::Value to a ConstValue. +// serde_json_borrow::Value is a borrowed version of serde_json::Value. +// It has a limited lifetime tied to the input JSON, making it more +// efficient. Benchmarking is required to determine the performance If any +// change is made. + +pub trait FromValue { + fn from_value(value: serde_json_borrow::Value) -> Self; +} + +impl FromValue for ConstValue { + fn from_value(value: serde_json_borrow::Value) -> Self { + match value { + serde_json_borrow::Value::Null => ConstValue::Null, + serde_json_borrow::Value::Bool(b) => ConstValue::Boolean(b), + serde_json_borrow::Value::Number(n) => ConstValue::Number(n.into()), + serde_json_borrow::Value::Str(s) => ConstValue::String(s.into()), + serde_json_borrow::Value::Array(a) => { + ConstValue::List(a.into_iter().map(|v| Self::from_value(v)).collect()) + } + serde_json_borrow::Value::Object(o) => ConstValue::Object( + o.into_vec() + .into_iter() + .map(|(k, v)| (Name::new(k), Self::from_value(v))) + .collect(), + ), + } + } +} + +impl Response { + pub async fn from_reqwest(resp: reqwest::Response) -> Result { + let status = StatusCode::from_u16(resp.status().as_u16())?; + let headers = HeaderMap::from(resp.headers().to_owned()); + let body = resp.bytes().await?; + Ok(Response { + status, + headers, + body, + }) + } + + pub async fn from_hyper(resp: hyper::Response>) -> Result { + let status = resp.status(); + let headers = HeaderMap::from(resp.headers().to_owned()); + let body = resp.into_body().collect().await?.to_bytes(); + Ok(Response { + status, + headers, + body, + }) + } + + pub fn empty() -> Self { + Response { + status: StatusCode::OK, + headers: HeaderMap::default(), + body: Bytes::new(), + } + } + pub fn to_serde_json(self) -> Result> { + if self.body.is_empty() { + return Ok(Response { + status: self.status, + headers: self.headers, + body: serde_json::Value::Null, + }); + } + let body: serde_json::Value = serde_json::from_slice(&self.body)?; + Ok(Response { + status: self.status, + headers: self.headers, + body, + }) + } + + pub fn to_json(self) -> Result> { + if self.body.is_empty() { + return Ok(Response { + status: self.status, + headers: self.headers, + body: Default::default(), + }); + } + // Note: We convert the body to a serde_json_borrow::Value for better + // performance. Warning: Do not change this to direct conversion to `T` + // without benchmarking the performance impact. 
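To illustrate the note above: serde_json can deserialize directly into a serde_json_borrow::Value whose strings borrow from the input buffer, and the FromValue impl defined earlier in this file lifts that into an async_graphql ConstValue. A rough, hypothetical in-crate test sketch of that path; the module name, payload, and assertion are assumptions for illustration only:

```rust
#[cfg(test)]
mod borrowed_body_sketch {
    use async_graphql_value::ConstValue;

    use super::FromValue;

    #[test]
    fn borrowed_json_to_const_value() {
        let raw = br#"{"id": 1, "tags": ["a", "b"]}"#;
        // Keys and string values borrow from `raw`; no owned Strings are allocated here.
        let borrowed: serde_json_borrow::Value = serde_json::from_slice(raw).unwrap();
        let value = ConstValue::from_value(borrowed);
        assert!(matches!(value, ConstValue::Object(_)));
    }
}
```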
+ let body: serde_json_borrow::Value = serde_json::from_slice(&self.body)?; + let body = T::from_value(body); + Ok(Response { + status: self.status, + headers: self.headers, + body, + }) + } + + pub fn to_resp_string(self) -> Result> { + Ok(Response:: { + body: String::from_utf8(self.body.to_vec())?, + status: self.status, + headers: self.headers, + }) + } +} + +impl From> for hyper::Response> { + fn from(resp: Response) -> Self { + let mut response = hyper::Response::new(Full::new(resp.body)); + *response.headers_mut() = resp.headers.into(); + *response.status_mut() = resp.status; + response + } +} diff --git a/projects/ssddOnTop/src/ir/discriminator.rs b/projects/ssddOnTop/src/ir/discriminator.rs new file mode 100644 index 0000000..a19b96c --- /dev/null +++ b/projects/ssddOnTop/src/ir/discriminator.rs @@ -0,0 +1,1310 @@ +use std::collections::HashSet; +use std::fmt::Write; + +use anyhow::{anyhow, bail, Result}; +use async_graphql::Value; +use derive_more::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Not}; +use indenter::indented; +use indexmap::IndexMap; + +use crate::config::Type1; +use crate::json::{JsonLike, JsonObjectLike}; + +pub trait TypedValue<'a> { + type Error; + + fn get_type_name(&'a self) -> Option<&'a str>; + fn set_type_name(&'a mut self, type_name: String) -> Result<(), Self::Error>; +} + +const TYPENAME_FIELD: &str = "__typename"; + +impl<'json, T> TypedValue<'json> for T +where + T: JsonLike<'json>, +{ + type Error = anyhow::Error; + + fn get_type_name(&'json self) -> Option<&'json str> { + self.as_object() + .and_then(|obj| obj.get_key(TYPENAME_FIELD)) + .and_then(|val| val.as_str()) + } + + fn set_type_name(&'json mut self, type_name: String) -> Result<(), Self::Error> { + if let Some(obj) = self.as_object_mut() { + obj.insert_key(TYPENAME_FIELD, T::string(type_name.into())); + + Ok(()) + } else { + bail!("Expected object") + } + } +} + +/// Resolver for type member of a union. +/// Based on type definitions and the provided value, it can +/// resolve the type of the value. +/// +/// ## Resolution algorithm +/// +/// The resolution algorithm is based on the following points: +/// - The common set of fields is the set of all fields that are defined in the +/// type members of the union. +/// - If the resolved value is a list, then the resolution should be run for +/// every entry in the list as a separate value. +/// - If a field from the common set is present in the resolved value, then the +/// result type is one of the types that have this field. +/// - If a field from the common set is required in some types and this field is +/// not present in the resolved value, then the result type is not one of +/// those types. +/// - By repeating the checks from above for every field in the common set, we +/// will end up with a smaller set of possible types and, more likely, with +/// only a single possible type. + +#[derive(Clone)] +pub struct Discriminator { + /// List of all types that are members of the Union. + types: Vec, + /// Set of all fields that are part of types with + /// the [FieldInfo] about their relations to types. 
+ fields_info: IndexMap, +} + +impl std::fmt::Debug for Discriminator { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("Discriminator {\n")?; + f.write_str("types: ")?; + f.write_fmt(format_args!("{:?}\n", &self.types))?; + f.write_str("fields_info:\n")?; + + { + let f = &mut indented(f); + for (field_name, field_info) in &self.fields_info { + f.write_fmt(format_args!("{field_name}:\n"))?; + field_info.display_types(&mut indented(f), &self.types)?; + } + } + + f.write_str("}\n")?; + + Ok(()) + } +} + +/// Represents the relations between a field and a type: +/// - `presented_in` - the field is part of the type definition, regardless of +/// nullability. +/// - `required_in` - the field is part of the type and is non-nullable. +#[derive(Default, Debug, Clone)] +struct FieldInfo { + presented_in: Repr, + required_in: Repr, +} + +impl FieldInfo { + /// Displays the [Repr] data inside FieldInfo as type names instead of the + /// raw underlying representation. + fn display_types(&self, f: &mut dyn Write, types: &[String]) -> std::fmt::Result { + f.write_str("presented_in: ")?; + f.write_fmt(format_args!( + "{:?}\n", + self.presented_in.covered_types(types) + ))?; + f.write_str("required_in: ")?; + f.write_fmt(format_args!( + "{:?}\n", + self.required_in.covered_types(types) + ))?; + + Ok(()) + } +} + +impl Discriminator { + pub fn new(union_name: &str, union_types: &[(&str, &Type1)]) -> Result { + if union_types.len() > usize::BITS as usize { + return Err(anyhow::anyhow!( + "Union {union_name} defines more than {} types that is not supported", + usize::BITS + )); + } + + let mut types = Vec::with_capacity(union_types.len()); + let mut fields_info: IndexMap = IndexMap::new(); + + // TODO: do we need to check also added_fields? + for (i, (type_name, type_)) in union_types.iter().enumerate() { + types.push(type_name.to_string()); + for (field_name, field) in type_.fields.iter() { + let info = fields_info.entry(field_name.to_string()).or_default(); + + let repr = Repr::from_type_index(i); + + // Add information for this field indicating that it is present in this type. + info.presented_in |= repr; + + // And information if it is required in this type. + if !field.ty_of.is_nullable() { + info.required_in |= repr; + } + } + } + + // Validation to ensure no two types have the same set of fields. + { + let mut duplicates = IndexMap::new(); + + for (_, type_) in union_types.iter() { + let mut repr = Repr::all_covered(union_types.len()); + for field_name in type_.fields.keys() { + if let Some(info) = fields_info.get(field_name.as_str()) { + repr &= info.presented_in; + } + } + + if repr.is_covering_multiple_types() { + let types = repr.covered_types(&types); + + // If every field in this type is also present in some other type, + // check if the other types have the same number of fields. + let same_types: Vec<_> = types + .into_iter() + .filter(|type_name| { + let other_type = union_types.iter().find(|(name, _)| name == type_name); + + if let Some((_, other_type)) = other_type { + other_type.fields.len() == type_.fields.len() + } else { + false + } + }) + .collect(); + + // One type is already the current type itself. + if same_types.len() > 1 { + duplicates.insert(same_types[0], same_types); + } + } + } + + if !duplicates.is_empty() { + return Err(anyhow!("Union have equal types")); + } + } + + // Strip fields that are not valuable for the discriminator. 
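Before the stripping step below, the masks produced by the loop above can be pictured with plain usize values (Repr and FieldInfo are private to this module, and the union members here are invented for the example): for a union of Foo { foo } and Bar { bar: String! }, indexed 0 and 1, construction and the later resolve_type narrowing work out as follows.

```rust
fn main() {
    // Hypothetical union: index 0 = Foo { foo }, index 1 = Bar { bar: String! }.
    let all: usize = (1 << 2) - 1;      // all_covered(2)           -> 0b11

    // Masks the loop above would build:
    let foo_presented: usize = 1 << 0;  // "foo" defined in Foo     -> 0b01
    let foo_required: usize = 0;        // "foo" is nullable        -> 0b00
    let bar_presented: usize = 1 << 1;  // "bar" defined in Bar     -> 0b10
    let bar_required: usize = 1 << 1;   // "bar" is non-nullable    -> 0b10

    // Narrowing as resolve_type does, for the value {"bar": "x"}:
    let mut possible = all;
    possible &= !foo_required;          // "foo" missing: drop types that require it (none do)
    possible &= bar_presented;          // "bar" present: keep types that define it
    assert_eq!(possible, 0b10);         // only Bar (index 1) remains

    // ... and for the value {"foo": 1}:
    let mut possible = all;
    possible &= foo_presented;          // "foo" present: keep types that define it
    possible &= !bar_required;          // "bar" missing: drop Bar, which requires it
    assert_eq!(possible, 0b01);         // only Foo (index 0) remains
}
```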
+ let fields_info = { + let mut seen_required_in: HashSet = HashSet::new(); + + fields_info + .into_iter() + .filter(|(_, field_info)| { + let drop = + // If a field is present in all types, it does not help in determining the type of the value. + field_info + .presented_in + .is_covering_all_types(union_types.len()) + // If multiple fields are required in the same set of types, we can keep only one of these fields. + || (!field_info.required_in.is_empty() && seen_required_in.contains(&field_info.required_in)); + + seen_required_in.insert(field_info.required_in); + + !drop + }) + .collect() + }; + + let discriminator = Self { fields_info, types }; + + tracing::debug!( + "Generated discriminator for union type '{union_name}':\n{discriminator:?}", + ); + + Ok(discriminator) + } + + pub fn resolve_type(&self, value: &Value) -> Result<&str> { + let Value::Object(obj) = value else { + bail!("Value expected to be object"); + }; + + let mut possible_types = Repr::all_covered(self.types.len()); + + for (field, info) in &self.fields_info { + if obj.contains_key(field.as_str()) { + possible_types &= info.presented_in; + } else { + possible_types &= !info.required_in; + } + + if possible_types.is_empty() { + // No possible types. Something is wrong with the resolved value. + bail!("Failed to find corresponding type for value") + } + + if !possible_types.is_covering_multiple_types() { + // We've got only one possible type, so return it, + // even though the value could be completely wrong if we check other fields. + // We want to cover positive cases and do it as soon as possible, + // and the wrong value will likely be incorrect to use later anyway. + return Ok(possible_types.first_covered_type(&self.types)); + } + } + + // We have multiple possible types. Return the first one + // that is defined earlier in the config. + Ok(possible_types.first_covered_type(&self.types)) + } +} + +/// Representation for a set of types if some condition is met. +/// The condition is represented as a bit inside the `usize` number, +/// where the bit position from the right in the binary representation of +/// `usize` is the index of the type in the set. If the value of the bit is +/// 1, then the condition is met. +#[derive( + Copy, + Clone, + Default, + PartialEq, + Eq, + Hash, + BitAnd, + BitOr, + BitXor, + BitAndAssign, + BitOrAssign, + BitXorAssign, + Not, +)] +struct Repr(usize); + +impl std::fmt::Debug for Repr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_fmt(format_args!("{:0b}", self.0)) + } +} + +impl Repr { + /// Create a new Repr where the condition is met for every type. + fn all_covered(len: usize) -> Self { + Self((1 << len) - 1) + } + + /// Create a new Repr where the condition is met + /// for the type with the given index. + fn from_type_index(index: usize) -> Self { + Self(1 << index) + } + + /// Search for the first type in the list for which the condition is met. + fn first_covered_type<'types>(&self, types: &'types [String]) -> &'types str { + &types[self.0.trailing_zeros() as usize] + } + + /// Returns a list of all types for which the condition is met. + fn covered_types<'types>(&self, types: &'types [String]) -> Vec<&'types str> { + let mut x = *self; + let mut result = Vec::new(); + + while x.0 != 0 { + result.push(x.first_covered_type(types)); + + x.0 = x.0 & (x.0 - 1); + } + + result + } + + /// Check if the condition is not met for any type. + fn is_empty(&self) -> bool { + self.0 == 0 + } + + /// Check if the condition is met for every type. 
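first_covered_type and covered_types above lean on two classic bit tricks. A standalone sketch of how they walk the set bits, using a plain usize and invented type names rather than the private Repr type:

```rust
fn main() {
    let types = ["A", "B", "C", "D"];
    let mut mask: usize = 0b1011; // the condition holds for A, B and D

    let mut covered = Vec::new();
    while mask != 0 {
        // trailing_zeros() is the index of the lowest set bit: the first covered type.
        covered.push(types[mask.trailing_zeros() as usize]);
        // x & (x - 1) clears that lowest set bit, moving on to the next covered type.
        mask &= mask - 1;
    }

    assert_eq!(covered, ["A", "B", "D"]);
}
```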
+ fn is_covering_all_types(&self, len: usize) -> bool { + self.0.trailing_ones() == len as u32 + } + + /// Check if the condition is met for more than one type. + fn is_covering_multiple_types(&self) -> bool { + !self.0.is_power_of_two() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::blueprint::wrapping_type::Type; + use crate::config::Field; + use async_graphql::Value; + use serde_json::json; + use test_log::test; + + #[test] + fn test_single_distinct_field_optional() { + let foo = Type1::default().fields(vec![("foo", Field::default())]); + let bar = Type1::default().fields(vec![("bar", Field::default())]); + let types = vec![("Foo", &foo), ("Bar", &bar)]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "foo": "test" })).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "bar": "test" })).unwrap()) + .unwrap(), + "Bar" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "Foo" + ); + } + + #[test] + fn test_single_distinct_field_required() { + let foo = Type1::default().fields(vec![( + "foo", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + )]); + let bar = Type1::default().fields(vec![( + "bar", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + )]); + let types = vec![("Foo", &foo), ("Bar", &bar)]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "foo": "test" })).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "bar": "test" })).unwrap()) + .unwrap(), + "Bar" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "Bar" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "Bar" + ); + } + + #[test] + fn test_multiple_distinct_field_required() { + let a = Type1::default().fields(vec![ + ( + "a", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + "ab", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + "abab", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let b = Type1::default().fields(vec![ + ( + "b", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + "ab", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + "abab", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + "ac", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let c = Type1::default().fields(vec![ + ( + "c", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + "ac", + Field { + ty_of: 
Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let types = vec![("A", &a), ("B", &b), ("C", &c)]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "a": 1, "ab": 1, "abab": 1 })).unwrap()) + .unwrap(), + "A" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "b": 1, "ab": 1, "abab": 1 })).unwrap()) + .unwrap(), + "B" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "c": 1, "ac": 1 })).unwrap()) + .unwrap(), + "C" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "a": 1, "b": 1, "c": 1 })).unwrap()) + .unwrap(), + "A" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "C" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "C" + ); + } + + #[test] + fn test_single_distinct_field_optional_and_shared_fields() { + let foo = Type1::default().fields(vec![ + ("a", Field::default()), + ("b", Field::default()), + ("foo", Field::default()), + ]); + let bar = Type1::default().fields(vec![ + ("a", Field::default()), + ("b", Field::default()), + ("bar", Field::default()), + ]); + let types = vec![("Foo", &foo), ("Bar", &bar)]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "a": 123, "b": true, "foo": "test" })).unwrap() + ) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "bar": "test" })).unwrap()) + .unwrap(), + "Bar" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "Foo" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "Foo" + ); + } + + #[test] + fn test_multiple_distinct_fields() { + let foo = Type1::default().fields(vec![ + ("a", Field::default()), + ("b", Field::default()), + ("foo", Field::default()), + ]); + let bar = Type1::default().fields(vec![("bar", Field::default())]); + let types = vec![("Foo", &foo), ("Bar", &bar)]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "b": 123, "foo": "test" })).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "bar": "test" })).unwrap()) + .unwrap(), + "Bar" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "unknown": { "foo": "bar" }, "a": 1 })).unwrap() + ) + .unwrap(), + "Foo" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) + .unwrap(), + "Foo" + ); + + 
assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "Foo" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "Foo" + ); + } + + #[test] + fn test_fields_intersection() { + let a = Type1::default().fields(vec![ + ("shared", Field::default()), + ("a", Field::default()), + ("aa", Field::default()), + ("aaa", Field::default()), + ]); + let b = Type1::default().fields(vec![ + ("shared", Field::default()), + ("b", Field::default()), + ("aa", Field::default()), + ]); + let c = Type1::default().fields(vec![ + ("shared", Field::default()), + ("c", Field::default()), + ("aaa", Field::default()), + ]); + let types = vec![("A", &a), ("B", &b), ("C", &c)]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "a": 1 })).unwrap()) + .unwrap(), + "A" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "b": 1, "aa": 1 })).unwrap()) + .unwrap(), + "B" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "c": 1, "aaa": 1 })).unwrap()) + .unwrap(), + "C" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "shared": 1, "a": 1, "b": 1, "c": 1 })).unwrap() + ) + .unwrap(), + "A" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "A" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "A" + ); + } + + #[test] + fn test_fields_protobuf_oneof() { + let var_var = Type1::default().fields(vec![("usual", Field::default())]); + let var0_var = Type1::default().fields(vec![ + ("usual", Field::default()), + ( + "payload", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let var1_var = Type1::default().fields(vec![ + ("usual", Field::default()), + ( + "command", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let var_var0 = Type1::default().fields(vec![ + ("usual", Field::default()), + ( + "flag", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let var_var1 = Type1::default().fields(vec![ + ("usual", Field::default()), + ( + "optPayload", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let var0_var0 = Type1::default().fields(vec![ + ("usual", Field::default()), + ( + "payload", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + "flag", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let var1_var0 = Type1::default().fields(vec![ + ("usual", Field::default()), + ( + "command", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + "flag", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let var0_var1 = Type1::default().fields(vec![ + ("usual", Field::default()), + ( + "payload", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + "optPayload", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let var1_var1 = Type1::default().fields(vec![ + ("usual", Field::default()), + ( + "command", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ( + 
"optPayload", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let types = vec![ + ("Var_Var", &var_var), + ("Var0_Var", &var0_var), + ("Var1_Var", &var1_var), + ("Var_Var0", &var_var0), + ("Var_Var1", &var_var1), + ("Var0_Var0", &var0_var0), + ("Var1_Var0", &var1_var0), + ("Var0_Var1", &var0_var1), + ("Var1_Var1", &var1_var1), + ]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "usual": 1 })).unwrap()) + .unwrap(), + "Var_Var" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "usual": 1, "payload": 1 })).unwrap()) + .unwrap(), + "Var0_Var" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "usual": 1, "command": 2, "useless": 1 })).unwrap() + ) + .unwrap(), + "Var1_Var" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "usual": 1, "flag": true })).unwrap()) + .unwrap(), + "Var_Var0" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "usual": 1, "optPayload": 1, "a": 1, "b": 2 })) + .unwrap() + ) + .unwrap(), + "Var_Var1" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "usual": 1, "payload": 1, "flag": true })).unwrap() + ) + .unwrap(), + "Var0_Var0" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "usual": 1, "payload": 1, "optPayload": 1 })) + .unwrap() + ) + .unwrap(), + "Var0_Var1" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "usual": 1, "command": 1, "flag": true })).unwrap() + ) + .unwrap(), + "Var1_Var0" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "usual": 1, "command": 1, "optPayload": 1 })) + .unwrap() + ) + .unwrap(), + "Var1_Var1" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "usual": 1, "command": 1, "payload": 1 })).unwrap() + ) + .unwrap_err() + .to_string(), + "Failed to find corresponding type for value" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "Var_Var" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "Var_Var" + ); + } + + #[test] + fn test_additional_types() { + let type_a = Type1::default().fields(vec![ + ("uniqueA1", Field::default()), + ("common", Field::default()), + ]); + let type_b = Type1::default().fields(vec![ + ( + "uniqueB1", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ("common", Field::default()), + ]); + let type_c = Type1::default().fields(vec![ + ("uniqueC1", Field::default()), + ("uniqueC2", Field::default()), + ]); + let type_d = Type1::default().fields(vec![ + ("uniqueD1", Field::default()), + ("common", Field::default()), + ( + "uniqueD2", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + + let types = vec![ + ("TypeA", &type_a), + ("TypeB", &type_b), + ("TypeC", &type_c), + ("TypeD", &type_d), + ]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "uniqueA1": "value", "common": 1 })).unwrap() + ) + .unwrap(), + "TypeA" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "uniqueB1": true, "common": 2 })).unwrap()) + .unwrap(), + "TypeB" + ); + + 
assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "uniqueC1": "value1", "uniqueC2": "value2" })) + .unwrap() + ) + .unwrap(), + "TypeC" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json( + json!({ "uniqueD1": "value", "common": 3, "uniqueD2": false }) + ) + .unwrap() + ) + .unwrap(), + "TypeD" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type( + &Value::from_json( + json!({ "uniqueA1": "value", "uniqueB1": true, "common": 4 }) + ) + .unwrap() + ) + .unwrap(), + "TypeA" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "TypeA" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "TypeA" + ); + } + + #[test] + fn test_combination_of_shared_fields() { + let type_a = Type1::default().fields(vec![ + ("field1", Field::default()), + ("field2", Field::default()), + ]); + let type_b = Type1::default().fields(vec![ + ("field2", Field::default()), + ("field3", Field::default()), + ]); + let type_c = Type1::default().fields(vec![ + ("field1", Field::default()), + ("field3", Field::default()), + ]); + let type_d = Type1::default().fields(vec![ + ("field1", Field::default()), + ("field2", Field::default()), + ( + "field4", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + + let types = vec![ + ("TypeA", &type_a), + ("TypeB", &type_b), + ("TypeC", &type_c), + ("TypeD", &type_d), + ]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "field1": "value", "field2": "value" })).unwrap() + ) + .unwrap(), + "TypeA" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "field2": "value", "field3": "value" })).unwrap() + ) + .unwrap(), + "TypeB" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json(json!({ "field1": "value", "field3": "value" })).unwrap() + ) + .unwrap(), + "TypeC" + ); + + assert_eq!( + discriminator + .resolve_type( + &Value::from_json( + json!({ "field1": "value", "field2": "value", "field4": "value" }) + ) + .unwrap() + ) + .unwrap(), + "TypeD" + ); + + // ambiguous cases + assert_eq!( + discriminator + .resolve_type( + &Value::from_json( + json!({ "field1": "value", "field2": "value", "field3": "value" }) + ) + .unwrap() + ) + .unwrap_err() + .to_string(), + "Failed to find corresponding type for value" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({})).unwrap()) + .unwrap(), + "TypeA" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) + .unwrap(), + "TypeA" + ); + } + + #[test] + fn validation_number_of_types() { + let types: Vec<_> = (0..136) + .map(|i| (i.to_string(), Type1::default())) + .collect(); + let union_types: Vec<_> = types + .iter() + .map(|(name, type_)| (name.as_str(), type_)) + .collect(); + + assert_eq!( + Discriminator::new("BigUnion", &union_types) + .unwrap_err() + .to_string(), + format!( + "Validation Error +• Union BigUnion defines more than {} types that is not supported +", + usize::BITS + ) + ); + } + + #[test] + fn test_validation_equal_types() { + let a = Type1::default().fields(vec![("a", Field::default()), ("b", Field::default())]); + let b = Type1::default().fields(vec![ + ( + "a", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ("b", 
Field::default()), + ]); + let c = Type1::default().fields(vec![("a", Field::default()), ("b", Field::default())]); + let d = Type1::default().fields(vec![ + ("a", Field::default()), + ("b", Field::default()), + ( + "c", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + let e = Type1::default().fields(vec![("c", Field::default()), ("d", Field::default())]); + let f = Type1::default().fields(vec![ + ("c", Field::default()), + ( + "d", + Field { + ty_of: Type::default().into_required(), + ..Field::default() + }, + ), + ]); + + let types = vec![ + ("A", &a), + ("B", &b), + ("C", &c), + ("D", &d), + ("E", &e), + ("F", &f), + ]; + + assert_eq!( + Discriminator::new("Test", &types).unwrap_err().to_string(), + "Validation Error +• Union have equal types: A == B == C [Test] +• Union have equal types: E == F [Test] +" + ); + } + + #[test] + fn test_validation_non_object() { + let foo = Type1::default().fields(vec![("foo", Field::default())]); + let bar = Type1::default().fields(vec![("bar", Field::default())]); + let types = vec![("Foo", &foo), ("Bar", &bar)]; + + let discriminator = Discriminator::new("Test", &types).unwrap(); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!("string")).unwrap()) + .unwrap_err() + .to_string(), + "Value expected to be object" + ); + + assert_eq!( + discriminator + .resolve_type(&Value::from_json(json!(25)).unwrap()) + .unwrap_err() + .to_string(), + "Value expected to be object" + ); + } +} diff --git a/projects/ssddOnTop/src/ir/eval_ctx.rs b/projects/ssddOnTop/src/ir/eval_ctx.rs new file mode 100644 index 0000000..3a4ed43 --- /dev/null +++ b/projects/ssddOnTop/src/ir/eval_ctx.rs @@ -0,0 +1,68 @@ +use crate::request_context::RequestContext; +use crate::value::Value; +use std::borrow::Cow; + +#[derive(Clone)] +pub struct EvalContext<'a> { + // Context create for each GraphQL Request + pub request_ctx: &'a RequestContext, + + // graphql_ctx: &'a Ctx, + pub graphql_ctx_value: Option, + + graphql_ctx_args: Option, +} + +impl<'a> EvalContext<'a> { + pub fn new(request_ctx: &'a RequestContext) -> Self { + Self { + request_ctx, + graphql_ctx_value: None, + graphql_ctx_args: None, + } + } + pub fn with_value(self, value: Value) -> Self { + Self { + graphql_ctx_value: Some(value), + ..self + } + } + pub fn with_args(self, args: Value) -> Self { + Self { + graphql_ctx_args: Some(args), + ..self + } + } + pub fn path_arg>(&self, path: &[T]) -> Option> { + let args = self.graphql_ctx_args.as_ref()?; + get_path_value(args, path).map(|a| Cow::Owned(a.clone())) + } + + pub fn path_value>(&self, path: &[T]) -> Option> { + // TODO: add unit tests for this + if let Some(value) = self.graphql_ctx_value.as_ref() { + get_path_value(value, path).map(Cow::Owned) + } else { + Some(Cow::Owned(Value::new(serde_json::Value::Null))) + // get_path_value(self.graphql_ctx.value()?, path).map(Cow::Borrowed) + } + } +} + +pub fn get_path_value>(input: &Value, path: &[T]) -> Option { + let mut value = Some(input.serde()); + for name in path { + match value { + Some(serde_json::Value::Object(map)) => { + value = map.get(name.as_ref()); + } + + Some(serde_json::Value::Array(list)) => { + value = list.get(name.as_ref().parse::().ok()?); + } + _ => return None, + } + } + + value.map(|v| Value::new(v.clone())) +} diff --git a/projects/ssddOnTop/src/ir/eval_http.rs b/projects/ssddOnTop/src/ir/eval_http.rs new file mode 100644 index 0000000..9e6a89e --- /dev/null +++ b/projects/ssddOnTop/src/ir/eval_http.rs @@ -0,0 +1,52 @@ +use 
crate::http::response::Response; +use crate::http::RequestTemplate; +use crate::ir::eval_ctx::EvalContext; +use crate::value::Value; +use reqwest::Request; + +pub struct EvalHttp<'a, 'ctx> { + evaluation_ctx: &'ctx EvalContext<'a>, + request_template: &'a RequestTemplate, +} + +impl<'a, 'ctx> EvalHttp<'a, 'ctx> { + pub fn new( + evaluation_ctx: &'ctx EvalContext<'a>, + request_template: &'a RequestTemplate, + ) -> Self { + Self { + evaluation_ctx, + request_template, + } + } + pub fn init_request(&self) -> anyhow::Result { + self.request_template.to_request(self.evaluation_ctx) + } + + pub async fn execute(&self, req: Request) -> anyhow::Result> { + let ctx = &self.evaluation_ctx; + let response = execute_raw_request(ctx, req).await?; + + Ok(response) + } +} + +pub async fn execute_raw_request( + ctx: &EvalContext<'_>, + req: Request, +) -> anyhow::Result> { + let response = ctx + .request_ctx + .runtime + .http + .execute(req) + .await? + .to_serde_json()?; + + let resp = Response { + status: response.status, + headers: response.headers, + body: Value::new(response.body), + }; + Ok(resp) +} diff --git a/projects/ssddOnTop/src/ir/eval_io.rs b/projects/ssddOnTop/src/ir/eval_io.rs new file mode 100644 index 0000000..b0b5424 --- /dev/null +++ b/projects/ssddOnTop/src/ir/eval_io.rs @@ -0,0 +1,33 @@ +use crate::ir::eval_ctx::EvalContext; +use crate::ir::eval_http::EvalHttp; +use crate::ir::IO; +use crate::request_context::CacheErr; +use crate::value::Value; +use std::num::NonZeroU64; + +pub async fn eval_io(io: &IO, ctx: &mut EvalContext<'_>) -> anyhow::Result { + let key = io.cache_key(ctx); + + if let Some(val) = ctx.request_ctx.cache.get(&key).await? { + Ok(val.clone()) + } else { + let val = eval_io_inner(io, ctx).await?; + ctx.request_ctx + .cache + .set(key, val.clone(), NonZeroU64::MAX) + .await?; + Ok(val) + } +} + +async fn eval_io_inner(io: &IO, ctx: &mut EvalContext<'_>) -> Result { + match io { + IO::Http { req_template, .. 
} => { + let eval_http = EvalHttp::new(ctx, req_template); + let request = eval_http.init_request()?; + let response = eval_http.execute(request).await?; + + Ok(response.body) + } + } +} diff --git a/projects/ssddOnTop/src/ir/mod.rs b/projects/ssddOnTop/src/ir/mod.rs new file mode 100644 index 0000000..13e5987 --- /dev/null +++ b/projects/ssddOnTop/src/ir/mod.rs @@ -0,0 +1,7 @@ +mod discriminator; +pub mod eval_ctx; +mod eval_http; +mod eval_io; +mod model; + +pub use model::*; diff --git a/projects/ssddOnTop/src/ir/model.rs b/projects/ssddOnTop/src/ir/model.rs new file mode 100644 index 0000000..7a3c682 --- /dev/null +++ b/projects/ssddOnTop/src/ir/model.rs @@ -0,0 +1,66 @@ +use crate::http; +use crate::ir::eval_ctx::EvalContext; +use crate::ir::eval_io::eval_io; +use crate::value::Value; +use std::num::NonZeroU64; +// use crate::jit::eval_ctx::EvalContext; + +#[derive(Clone, Debug)] +pub enum IR { + IO(IO), + Cache(Cache), +} + +#[derive(Clone, Debug)] +pub struct Cache { + pub max_age: NonZeroU64, + pub io: IO, +} + +#[derive(Clone, Copy, Debug)] +pub struct DataLoaderId(usize); + +impl DataLoaderId { + pub fn new(id: usize) -> Self { + Self(id) + } + + pub fn as_usize(&self) -> usize { + self.0 + } +} + +#[derive(PartialEq, Eq, Clone, Hash, Debug)] +pub struct IoId(u64); + +impl IoId { + pub fn new(id: u64) -> Self { + Self(id) + } + + pub fn as_u64(&self) -> u64 { + self.0 + } +} + +#[derive(Clone, Debug)] +pub enum IO { + Http { req_template: http::RequestTemplate }, +} + +impl IR { + pub async fn eval<'a, 'b>(&'a self, ctx: &'b mut EvalContext<'a>) -> anyhow::Result { + match self { + IR::IO(io) => eval_io(io, ctx).await, + IR::Cache(_) => todo!(), + } + } +} + +impl<'a> IO { + pub fn cache_key(&self, ctx: &EvalContext<'a>) -> IoId { + match self { + IO::Http { req_template, .. 
} => req_template.cache_key(ctx), + } + } +} diff --git a/projects/ssddOnTop/src/jit/mod.rs b/projects/ssddOnTop/src/jit/mod.rs new file mode 100644 index 0000000..65880be --- /dev/null +++ b/projects/ssddOnTop/src/jit/mod.rs @@ -0,0 +1 @@ +pub mod model; diff --git a/projects/ssddOnTop/src/jit/model.rs b/projects/ssddOnTop/src/jit/model.rs new file mode 100644 index 0000000..df68c88 --- /dev/null +++ b/projects/ssddOnTop/src/jit/model.rs @@ -0,0 +1,333 @@ +use crate::blueprint::model::{FieldName, TypeName}; +use crate::blueprint::{Blueprint, FieldHash}; +use crate::ir::eval_ctx::EvalContext; +use crate::ir::IR; +use crate::json::JsonObjectLike; +use crate::value::Value; +use async_graphql::parser::types::{DocumentOperations, ExecutableDocument, OperationType, Selection, SelectionSet}; +use async_graphql::Positioned; +use serde_json::Map; +use std::fmt::Debug; +use std::future::Future; +use std::pin::Pin; +use serde_json_borrow::ObjectAsVec; + +pub struct PathFinder<'a> { + doc: ExecutableDocument, + blueprint: &'a Blueprint, +} +#[derive(Debug)] +pub struct Fields { + fields: Vec, +} + +#[derive(Debug)] +pub struct Fields1<'a> { + fields: Vec>, +} + +fn to_borrowed(val: &Value) -> serde_json_borrow::Value { + serde_json_borrow::Value::from(val.serde()) +} + +impl<'a> Fields1<'a> { + #[inline(always)] + pub fn finalize(&'a self) -> serde_json_borrow::Value<'a> { + let mut map = ObjectAsVec::new(); + for field in self.fields.iter() { + let name = field.name; + let val = Self::finalize_inner(field, None, None); + map.insert(name, val); + } + let mut ans = ObjectAsVec::new(); + ans.insert("data", serde_json_borrow::Value::Object(map)); + // map.insert("data".to_string(), self.finalize_inner()); + // serde_json::Value::Object(map) + serde_json_borrow::Value::Object(ans) + } + #[inline(always)] + fn finalize_inner(field: &'a Field1<'a>, mut value: Option<&'a serde_json_borrow::Value<'a>>, index: Option) -> serde_json_borrow::Value<'a> { + if let Some(val) = &field.resolved { + if value.is_none() { + value = Some(val); + } + } + if let Some(val) = value{ + match (val.as_array(), val.as_object()) { + (_, Some(obj)) => { + // let mut ans = Map::new(); + let mut ans = ObjectAsVec::new(); + + if field.nested.is_empty() { + let val = obj.get_key(field.name); + let value = Self::finalize_inner(field, val, index); + ans.insert(field.name, value); + } else { + for child in field.nested.iter() { + let child_name = child.name; + let val = obj.get_key(child.name); + let val = Self::finalize_inner(child, val, index); + ans.insert(child_name, val); + } + } + + serde_json_borrow::Value::Object(ans) + } + (Some(arr), _) => { + if let Some(index) = index { + let val = arr.get(index); + let val = Self::finalize_inner(field, val, None); + val + } else { + let mut ans = vec![]; + for (i, val) in arr.iter().enumerate() { + let val = Self::finalize_inner(field, Some(val), Some(i)); + ans.push(val); + } + serde_json_borrow::Value::Array(ans) + } + } + _ => value.cloned().unwrap_or_default(), + } + } else { + serde_json_borrow::Value::Null + } + } +} + +#[derive(Debug)] +// TODO: give it a lifetime +// it won't make much difference.. 
+// but anyways +pub struct Field { + ir: Option, + pub name: String, + pub type_of: crate::blueprint::wrapping_type::Type, + nested: Vec, + pub args: Option, + pub resolved: Option, +} + +impl Fields { + #[inline(always)] + pub fn to_borrowed<'a>(&'a self) -> Fields1<'a> { + let fields = Self::borrowed_inner(&self.fields); + Fields1 { + fields, + } + } + + #[inline(always)] + pub fn borrowed_inner<'a>(vec: &'a [Field]) -> Vec> { + let mut ans = vec![]; + for field in vec.iter() { + let field = Field1 { + ir: field.ir.as_ref(), + name: field.name.as_str(), + type_of: &field.type_of, + nested: Self::borrowed_inner(&field.nested), + args: field.args.as_ref(), + resolved: field.resolved.as_ref().map(|v| serde_json_borrow::Value::from(v.serde())), + }; + ans.push(field); + } + ans + } + +} + +#[derive(Debug)] +pub struct Field1<'a> { + ir: Option<&'a IR>, + pub name: &'a str, + pub type_of: &'a crate::blueprint::wrapping_type::Type, + nested: Vec>, + pub args: Option<&'a Value>, + pub resolved: Option>, +} +impl Fields { + #[inline(always)] + pub async fn resolve<'a>(mut self, eval_context: EvalContext<'a>) -> anyhow::Result { + let mut ans = vec![]; + ans = Self::resolve_inner(self.fields, eval_context, None).await?; + Ok(Fields { + fields: ans, + }) + } + + #[inline(always)] + fn resolve_inner<'a>(fields: Vec, mut eval_context: EvalContext<'a>, parent: Option) -> Pin>> + Send + 'a>> { + Box::pin(async move { + let mut ans = vec![]; + for mut field in fields { + let mut parent_val = None; + + if let Some(ir) = field.ir.as_ref() { + if let Some(val) = field.args.clone() { + eval_context = eval_context.with_args(val); + } + + let val = match &parent { + Some(val) => { + match val.serde() { + serde_json::Value::Array(arr) => { + let mut ans = vec![]; + for val in arr { + eval_context = eval_context.with_value(Value::new(val.clone())); + let val = ir.eval(&mut eval_context.clone()).await?; + ans.push(val.into_serde()); + } + Some(Value::new(serde_json::Value::Array(ans))) + } + val => { + eval_context = eval_context.with_value(Value::new(val.clone())); + let val = ir.eval(&mut eval_context.clone()).await?; + Some(val) + } + } + } + None => { + let val = ir.eval(&mut eval_context.clone()).await?; + Some(val) + } + }; + parent_val = val.clone(); + field.resolved = val; + } else { + // println!("hx: {}", field.name); + // let val = Self::resolve_non_ir(eval_context.graphql_ctx_value.as_ref().map(|v| v.serde()).unwrap_or(&serde_json::Value::Null), field.name.as_str()); + // println!("{}",val); + // let val = eval_context.path_value(&[field.name.as_str()]); + // let val = val.unwrap_or(Cow::Owned(Value::new(serde_json::Value::Null))).into_owned(); + // field.resolved = Some(Value::new(val)); + } + + let eval_ctx_clone = eval_context.clone(); + field.nested = Self::resolve_inner(field.nested, eval_ctx_clone, parent_val).await?; + ans.push(field); + } + Ok(ans) + }) + } + #[inline(always)] + fn resolve_non_ir(value: &serde_json::Value, key: &str) -> serde_json::Value { + match value { + serde_json::Value::Array(arr) => { + let mut ans = vec![]; + for val in arr { + ans.push(Self::resolve_non_ir(val, key)); + } + serde_json::Value::Array(ans) + } + serde_json::Value::Object(obj) => { + let mut ans = Map::new(); + obj.get_key(key).map(|v| ans.insert(key.to_string(), v.clone())).unwrap_or_default(); + serde_json::Value::Object(ans) + } + val => val.clone(), + } + } +} + +pub struct Holder<'a> { + field_name: &'a str, + field_type: &'a str, + args: Vec<(&'a str, Value)>, +} + +impl<'a> PathFinder<'a> { + 
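PathFinder::exec below pattern-matches on the parsed document's operations. A minimal sketch of where that ExecutableDocument comes from, assuming the async-graphql parser already used by the request handler; the query string is an invented example:

```rust
use async_graphql::parser::types::DocumentOperations;

fn main() -> anyhow::Result<()> {
    // Hypothetical query; in this project the string arrives in the POST body.
    let doc = async_graphql::parser::parse_query("{ posts { id title } }")?;

    match &doc.operations {
        DocumentOperations::Single(op) => {
            // One anonymous query operation with a single root selection ("posts").
            assert_eq!(op.node.selection_set.node.items.len(), 1);
        }
        DocumentOperations::Multiple(_) => unreachable!("single anonymous operation"),
    }
    Ok(())
}
```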
pub fn new(doc: ExecutableDocument, blueprint: &'a Blueprint) -> Self { + Self { doc, blueprint } + } + pub async fn exec(&'a self) -> Fields { + match &self.doc.operations { + DocumentOperations::Single(single) => { + let operation = &single.node; + let selection_set = &operation.selection_set.node; + let ty = match &operation.ty { + OperationType::Query => { + let query = self.blueprint.schema.query.as_ref().map(|v| v.as_str()); + query + } + OperationType::Mutation => None, + OperationType::Subscription => None, + }; + if let Some(ty) = ty { + Fields { + fields: self.iter(selection_set, ty), + } + } else { + Fields { + fields: vec![], + } + } + } + DocumentOperations::Multiple(multi) => { + let (_,single) = multi.iter().next().unwrap(); + let operation = &single.node; + let selection_set = &operation.selection_set.node; + let ty = match &operation.ty { + OperationType::Query => { + let query = self.blueprint.schema.query.as_ref().map(|v| v.as_str()); + query + } + OperationType::Mutation => None, + OperationType::Subscription => None, + }; + if let Some(ty) = ty { + Fields { + fields: self.iter(selection_set, ty), + } + } else { + Fields { + fields: vec![], + } + } + } + } + } + #[inline(always)] + fn iter( + &self, + selection: &SelectionSet, + type_condition: &str, + ) -> Vec { + let mut fields = vec![]; + for selection in &selection.items { + match &selection.node { + Selection::Field(Positioned { node: gql_field, .. }) => { + let field_name = gql_field.name.node.as_str(); + let request_args = gql_field + .arguments + .iter() + .map(|(k, v)| (k.node.as_str().to_string(), v.node.to_owned().into_const().map(|v| v.into_json().ok()).flatten().unwrap())) + .collect::>(); + + if let Some(field_def) = self.blueprint.fields.get(&FieldHash::new(FieldName(field_name.to_string()), TypeName(type_condition.to_string()))) { + let type_of = field_def.type_of.clone(); + let child_fields = self.iter( + &gql_field.selection_set.node, + type_of.name(), + ); + let field = Field { + ir: field_def.ir.clone(), + name: field_def.name.as_ref().to_string(), + type_of, + nested: child_fields, + args: match request_args.is_empty() { + false => Some(Value::new(serde_json::Value::Object(request_args))), + true => None, + }, + resolved: None, + }; + + fields.push(field); + } + } + _ => (), + } + } + + fields + } +} \ No newline at end of file diff --git a/projects/ssddOnTop/src/json/borrow.rs b/projects/ssddOnTop/src/json/borrow.rs new file mode 100644 index 0000000..1a38196 --- /dev/null +++ b/projects/ssddOnTop/src/json/borrow.rs @@ -0,0 +1,125 @@ +use std::borrow::Cow; + +use serde_json_borrow::{ObjectAsVec, Value}; + +use super::{gather_path_matches, group_by_key, JsonLike, JsonObjectLike}; + +// BorrowedValue +impl<'ctx> JsonObjectLike<'ctx> for ObjectAsVec<'ctx> { + type Value = Value<'ctx>; + + fn new() -> Self { + ObjectAsVec::default() + } + + fn get_key(&self, key: &str) -> Option<&Self::Value> { + self.get(key) + } + + fn insert_key(&mut self, key: &'ctx str, value: Self::Value) { + self.insert(key, value); + } +} + +impl<'ctx> JsonLike<'ctx> for Value<'ctx> { + type JsonObject = ObjectAsVec<'ctx>; + + fn null() -> Self { + Value::Null + } + + fn object(obj: Self::JsonObject) -> Self { + Value::Object(obj) + } + + fn array(arr: Vec) -> Self { + Value::Array(arr) + } + + fn string(s: Cow<'ctx, str>) -> Self { + Value::Str(s) + } + + fn as_array(&self) -> Option<&Vec> { + match self { + Value::Array(array) => Some(array), + _ => None, + } + } + + fn into_array(self) -> Option> { + match self { + 
Value::Array(array) => Some(array), + _ => None, + } + } + + fn as_object(&self) -> Option<&Self::JsonObject> { + self.as_object() + } + + fn as_object_mut(&mut self) -> Option<&mut Self::JsonObject> { + match self { + Value::Object(obj) => Some(obj), + _ => None, + } + } + + fn into_object(self) -> Option { + match self { + Value::Object(obj) => Some(obj), + _ => None, + } + } + + fn as_str(&self) -> Option<&str> { + self.as_str() + } + + fn as_i64(&self) -> Option { + self.as_i64() + } + + fn as_u64(&self) -> Option { + self.as_u64() + } + + fn as_f64(&self) -> Option { + self.as_f64() + } + + fn as_bool(&self) -> Option { + self.as_bool() + } + + fn is_null(&self) -> bool { + self.is_null() + } + + fn get_path>(&'ctx self, path: &[T]) -> Option<&Self> { + let mut val = self; + for token in path { + val = match val { + Value::Array(arr) => { + let index = token.as_ref().parse::().ok()?; + arr.get(index)? + } + Value::Object(map) => map.get(token.as_ref())?, + _ => return None, + }; + } + Some(val) + } + + fn get_key(&'ctx self, path: &str) -> Option<&Self> { + match self { + Value::Object(map) => map.get(path), + _ => None, + } + } + + fn group_by(&'ctx self, path: &[String]) -> std::collections::HashMap> { + let src = gather_path_matches(self, path, vec![]); + group_by_key(src) + } +} diff --git a/projects/ssddOnTop/src/json/graphql.rs b/projects/ssddOnTop/src/json/graphql.rs new file mode 100644 index 0000000..aa049a8 --- /dev/null +++ b/projects/ssddOnTop/src/json/graphql.rs @@ -0,0 +1,145 @@ +use std::borrow::Cow; +use std::collections::HashMap; + +use async_graphql::Name; +use async_graphql_value::ConstValue; +use indexmap::IndexMap; + +use super::*; + +impl<'obj, Value: JsonLike<'obj> + Clone> JsonObjectLike<'obj> for IndexMap { + type Value = Value; + + fn new() -> Self { + IndexMap::new() + } + + fn get_key(&self, key: &str) -> Option<&Self::Value> { + self.get(key) + } + + fn insert_key(&mut self, key: &'obj str, value: Self::Value) { + self.insert(Name::new(key), value); + } +} + +impl<'json> JsonLike<'json> for ConstValue { + type JsonObject = IndexMap; + + fn as_array(&self) -> Option<&Vec> { + match self { + ConstValue::List(seq) => Some(seq), + _ => None, + } + } + + fn into_array(self) -> Option> { + match self { + ConstValue::List(seq) => Some(seq), + _ => None, + } + } + + fn as_str(&self) -> Option<&str> { + match self { + ConstValue::String(s) => Some(s), + _ => None, + } + } + + fn as_i64(&self) -> Option { + match self { + ConstValue::Number(n) => n.as_i64(), + _ => None, + } + } + + fn as_u64(&self) -> Option { + match self { + ConstValue::Number(n) => n.as_u64(), + _ => None, + } + } + + fn as_f64(&self) -> Option { + match self { + ConstValue::Number(n) => n.as_f64(), + _ => None, + } + } + + fn as_bool(&self) -> Option { + match self { + ConstValue::Boolean(b) => Some(*b), + _ => None, + } + } + + fn is_null(&self) -> bool { + matches!(self, ConstValue::Null) + } + + fn get_path>(&self, path: &[T]) -> Option<&Self> { + let mut val = self; + for token in path { + val = match val { + ConstValue::List(seq) => { + let index = token.as_ref().parse::().ok()?; + seq.get(index)? 
+ } + ConstValue::Object(map) => map.get(token.as_ref())?, + _ => return None, + }; + } + Some(val) + } + + fn get_key(&self, path: &str) -> Option<&Self> { + match self { + ConstValue::Object(map) => map.get(&async_graphql::Name::new(path)), + _ => None, + } + } + + fn group_by(&self, path: &[String]) -> HashMap> { + let src = gather_path_matches(self, path, vec![]); + group_by_key(src) + } + + fn null() -> Self { + Default::default() + } + + fn as_object(&self) -> Option<&Self::JsonObject> { + match self { + ConstValue::Object(map) => Some(map), + _ => None, + } + } + + fn as_object_mut(&mut self) -> Option<&mut Self::JsonObject> { + match self { + ConstValue::Object(map) => Some(map), + _ => None, + } + } + + fn into_object(self) -> Option { + match self { + ConstValue::Object(map) => Some(map), + _ => None, + } + } + + fn object(obj: Self::JsonObject) -> Self { + ConstValue::Object(obj) + } + + fn array(arr: Vec) -> Self { + ConstValue::List(arr) + } + + fn string(s: Cow<'json, str>) -> Self { + ConstValue::String(s.to_string()) + } +} diff --git a/projects/ssddOnTop/src/json/json_like.rs b/projects/ssddOnTop/src/json/json_like.rs new file mode 100644 index 0000000..d46f6fe --- /dev/null +++ b/projects/ssddOnTop/src/json/json_like.rs @@ -0,0 +1,194 @@ +use std::borrow::Cow; +use std::collections::HashMap; + +pub trait JsonLikeOwned: for<'json> JsonLike<'json> {} +impl JsonLikeOwned for T where T: for<'json> JsonLike<'json> {} + +/// A trait for objects that can be used as JSON values +pub trait JsonLike<'json>: Sized { + type JsonObject: JsonObjectLike<'json, Value = Self>; + + // Constructors + fn null() -> Self; + fn object(obj: Self::JsonObject) -> Self; + fn array(arr: Vec) -> Self; + fn string(s: Cow<'json, str>) -> Self; + + // Operators + fn as_array(&self) -> Option<&Vec>; + fn into_array(self) -> Option>; + fn as_object(&self) -> Option<&Self::JsonObject>; + fn as_object_mut(&mut self) -> Option<&mut Self::JsonObject>; + fn into_object(self) -> Option; + fn as_str(&self) -> Option<&str>; + fn as_i64(&self) -> Option; + fn as_u64(&self) -> Option; + fn as_f64(&self) -> Option; + fn as_bool(&self) -> Option; + fn is_null(&self) -> bool; + fn get_path>(&'json self, path: &[T]) -> Option<&Self>; + fn get_key(&'json self, path: &str) -> Option<&Self>; + fn group_by(&'json self, path: &[String]) -> HashMap>; +} + +/// A trait for objects that can be used as JSON objects +pub trait JsonObjectLike<'obj>: Sized { + type Value; + fn new() -> Self; + fn get_key(&self, key: &str) -> Option<&Self::Value>; + fn insert_key(&mut self, key: &'obj str, value: Self::Value); +} + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + use serde_json::json; + + use super::super::gather_path_matches; + use super::{JsonLike, JsonObjectLike}; + use crate::json::group_by_key; + + // for lifetime testing purposes + #[allow(dead_code)] + fn create_json_like<'a, Value: JsonLike<'a>>() -> Value { + unimplemented!("fake test fn") + } + + // for lifetime testing purposes + #[allow(dead_code)] + fn test_json_like_lifetime<'a, Value: JsonLike<'a> + Clone>() -> Value { + let value: Value = create_json_like(); + + if value.is_null() { + return Value::null(); + } + + if value.as_bool().is_some() { + println!("bool"); + } + + if value.as_f64().is_some() { + println!("f64"); + } + + if let Some(s) = value.as_str() { + return Value::string(s.to_string().into()); + } + + if let Some(arr) = value.as_array() { + return Value::array(arr.clone()); + } + + if value.as_object().is_some() { + return 
Value::object(Value::JsonObject::new()); + } + + value + } + + #[test] + fn test_gather_path_matches() { + let input = json!([ + {"id": "1"}, + {"id": "2"}, + {"id": "3"} + ]); + + let actual = + serde_json::to_value(gather_path_matches(&input, &["id".into()], vec![])).unwrap(); + + let expected = json!( + [ + ["1", {"id": "1"}], + ["2", {"id": "2"}], + ["3", {"id": "3"}], + ] + ); + + assert_eq!(actual, expected) + } + + #[test] + fn test_gather_path_matches_nested() { + let input = json!({ + "data": [ + {"user": {"id": "1"}}, + {"user": {"id": "2"}}, + {"user": {"id": "3"}}, + {"user": [ + {"id": "4"}, + {"id": "5"} + ] + }, + ] + }); + + let actual = serde_json::to_value(gather_path_matches( + &input, + &["data".into(), "user".into(), "id".into()], + vec![], + )) + .unwrap(); + + let expected = json!( + [ + ["1", {"id": "1"}], + ["2", {"id": "2"}], + ["3", {"id": "3"}], + ["4", {"id": "4"}], + ["5", {"id": "5"}], + + ] + ); + + assert_eq!(actual, expected) + } + + #[test] + fn test_group_by_key() { + let arr = vec![ + (json!("1"), json!({"id": "1"})), + (json!("2"), json!({"id": "2"})), + (json!("2"), json!({"id": "2"})), + (json!("3"), json!({"id": "3"})), + ]; + let input: Vec<(&serde_json::Value, &serde_json::Value)> = + arr.iter().map(|a| (&a.0, &a.1)).collect(); + + let actual = serde_json::to_value(group_by_key(input)).unwrap(); + + let expected = json!( + { + "1": [{"id": "1"}], + "2": [{"id": "2"}, {"id": "2"}], + "3": [{"id": "3"}], + } + ); + + assert_eq!(actual, expected) + } + + #[test] + fn test_group_by_numeric_key() { + let arr = vec![ + (json!(1), json!({"id": 1})), + (json!(2), json!({"id": 2})), + (json!(2), json!({"id": 2})), + (json!(3), json!({"id": 3})), + ]; + let input: Vec<(&serde_json::Value, &serde_json::Value)> = + arr.iter().map(|a| (&a.0, &a.1)).collect(); + + let actual = serde_json::to_value(group_by_key(input)).unwrap(); + + let expected = json!( + { + "1": [{"id": 1}], + "2": [{"id": 2}, {"id": 2}], + "3": [{"id": 3}], + } + ); + + assert_eq!(actual, expected) + } +} diff --git a/projects/ssddOnTop/src/json/json_like_list.rs b/projects/ssddOnTop/src/json/json_like_list.rs new file mode 100644 index 0000000..6743b38 --- /dev/null +++ b/projects/ssddOnTop/src/json/json_like_list.rs @@ -0,0 +1,28 @@ +use super::JsonLike; + +pub trait JsonLikeList<'json>: JsonLike<'json> { + fn map(self, mut mapper: impl FnMut(Self) -> Result) -> Result { + if self.as_array().is_some() { + let new = self + .into_array() + .unwrap() + .into_iter() + .map(mapper) + .collect::>()?; + + Ok(Self::array(new)) + } else { + mapper(self) + } + } + + fn try_for_each(&self, mut f: impl FnMut(&Self) -> Result<(), Err>) -> Result<(), Err> { + if let Some(arr) = self.as_array() { + arr.iter().try_for_each(f) + } else { + f(self) + } + } +} + +impl<'json, T: JsonLike<'json>> JsonLikeList<'json> for T {} diff --git a/projects/ssddOnTop/src/json/mod.rs b/projects/ssddOnTop/src/json/mod.rs new file mode 100644 index 0000000..076813b --- /dev/null +++ b/projects/ssddOnTop/src/json/mod.rs @@ -0,0 +1,56 @@ +mod borrow; +mod graphql; +mod json_like; +mod json_like_list; +mod serde; + +use std::collections::HashMap; + +pub use json_like::*; +pub use json_like_list::*; + +// Highly micro-optimized and benchmarked version of get_path_all +// Any further changes should be verified with benchmarks +pub fn gather_path_matches<'json, J: JsonLike<'json>>( + root: &'json J, + path: &[String], + mut vector: Vec<(&'json J, &'json J)>, +) -> Vec<(&'json J, &'json J)> { + if let Some(root) = root.as_array() 
{
+        for value in root.iter() {
+            vector = gather_path_matches(value, path, vector);
+        }
+    } else if let Some((key, tail)) = path.split_first() {
+        if let Some(value) = root.get_key(key) {
+            if tail.is_empty() {
+                vector.push((value, root));
+            } else {
+                vector = gather_path_matches(value, tail, vector);
+            }
+        }
+    }
+
+    vector
+}
+
+fn group_by_key<'json, J: JsonLike<'json>>(
+    src: Vec<(&'json J, &'json J)>,
+) -> HashMap<String, Vec<&'json J>> {
+    let mut map: HashMap<String, Vec<&'json J>> = HashMap::new();
+    for (key, value) in src {
+        // Need to handle number and string keys
+        let key_str = key
+            .as_str()
+            .map(|a| a.to_string())
+            .or_else(|| key.as_f64().map(|a| a.to_string()));
+
+        if let Some(key) = key_str {
+            if let Some(values) = map.get_mut(&key) {
+                values.push(value);
+            } else {
+                map.insert(key, vec![value]);
+            }
+        }
+    }
+    map
+}
diff --git a/projects/ssddOnTop/src/json/serde.rs b/projects/ssddOnTop/src/json/serde.rs
new file mode 100644
index 0000000..4f6fad6
--- /dev/null
+++ b/projects/ssddOnTop/src/json/serde.rs
@@ -0,0 +1,119 @@
+use std::borrow::Cow;
+use std::collections::HashMap;
+
+use super::{JsonLike, JsonObjectLike};
+
+impl<'obj> JsonObjectLike<'obj> for serde_json::Map<String, serde_json::Value> {
+    type Value = serde_json::Value;
+
+    fn new() -> Self {
+        serde_json::Map::new()
+    }
+
+    fn get_key(&self, key: &str) -> Option<&serde_json::Value> {
+        self.get(key)
+    }
+
+    fn insert_key(&mut self, key: &'obj str, value: Self::Value) {
+        self.insert(key.to_owned(), value);
+    }
+}
+
+impl<'json> JsonLike<'json> for serde_json::Value {
+    type JsonObject = serde_json::Map<String, serde_json::Value>;
+
+    fn as_array(&self) -> Option<&Vec<Self>> {
+        self.as_array()
+    }
+
+    fn into_array(self) -> Option<Vec<Self>> {
+        if let Self::Array(vec) = self {
+            Some(vec)
+        } else {
+            None
+        }
+    }
+
+    fn as_str(&self) -> Option<&str> {
+        self.as_str()
+    }
+
+    fn as_i64(&self) -> Option<i64> {
+        self.as_i64()
+    }
+
+    fn as_u64(&self) -> Option<u64> {
+        self.as_u64()
+    }
+
+    fn as_f64(&self) -> Option<f64> {
+        self.as_f64()
+    }
+
+    fn as_bool(&self) -> Option<bool> {
+        self.as_bool()
+    }
+
+    fn is_null(&self) -> bool {
+        self.is_null()
+    }
+
+    fn get_path<T: AsRef<str>>(&self, path: &[T]) -> Option<&Self> {
+        let mut val = self;
+        for token in path {
+            val = match val {
+                serde_json::Value::Array(arr) => {
+                    let index = token.as_ref().parse::<usize>().ok()?;
+                    arr.get(index)?
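+                    // (non-numeric tokens or out-of-range indexes make the whole lookup return None)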
+ } + serde_json::Value::Object(map) => map.get(token.as_ref())?, + _ => return None, + }; + } + Some(val) + } + + fn get_key(&self, path: &str) -> Option<&Self> { + match self { + serde_json::Value::Object(map) => map.get(path), + _ => None, + } + } + + fn group_by(&self, path: &[String]) -> HashMap> { + let src = super::gather_path_matches(self, path, vec![]); + super::group_by_key(src) + } + + fn null() -> Self { + Self::Null + } + + fn as_object(&self) -> Option<&Self::JsonObject> { + self.as_object() + } + + fn as_object_mut(&mut self) -> Option<&mut Self::JsonObject> { + self.as_object_mut() + } + + fn into_object(self) -> Option { + if let Self::Object(obj) = self { + Some(obj) + } else { + None + } + } + + fn object(obj: Self::JsonObject) -> Self { + serde_json::Value::Object(obj) + } + + fn array(arr: Vec) -> Self { + serde_json::Value::Array(arr) + } + + fn string(s: Cow<'json, str>) -> Self { + serde_json::Value::String(s.to_string()) + } +} diff --git a/projects/ssddOnTop/src/lib.rs b/projects/ssddOnTop/src/lib.rs new file mode 100644 index 0000000..ccef48e --- /dev/null +++ b/projects/ssddOnTop/src/lib.rs @@ -0,0 +1,25 @@ +#![allow(unused, non_snake_case)] +mod app_ctx; +mod blueprint; +mod cache; +mod config; +mod directive; +mod dl; +mod endpoint; +mod from_doc; +mod hasher; +mod helpers; +mod http; +mod ir; +mod jit; +mod json; +mod mustache; +mod path; +mod request_context; +pub mod run; +mod target_runtime; +mod value; + +pub fn is_default(val: &T) -> bool { + *val == T::default() +} diff --git a/projects/ssddOnTop/src/main.rs b/projects/ssddOnTop/src/main.rs new file mode 100644 index 0000000..c0fe75b --- /dev/null +++ b/projects/ssddOnTop/src/main.rs @@ -0,0 +1,9 @@ +fn main() -> anyhow::Result<()> { + // tracing_subscriber::fmt::init(); + let rt = tokio::runtime::Builder::new_multi_thread() + .worker_threads(num_cpus::get()) + .enable_all() + .build()?; + rt.block_on(ssddOnTop::run::run())?; + Ok(()) +} diff --git a/projects/ssddOnTop/src/mustache/eval.rs b/projects/ssddOnTop/src/mustache/eval.rs new file mode 100644 index 0000000..74939b7 --- /dev/null +++ b/projects/ssddOnTop/src/mustache/eval.rs @@ -0,0 +1,274 @@ +use crate::mustache::model::{Mustache, Segment}; +use crate::path::PathString; + +pub trait Eval<'a> { + type In; + type Out; + + fn eval(&'a self, mustache: &'a Mustache, in_value: &'a Self::In) -> Self::Out; +} + +pub struct PathStringEval(std::marker::PhantomData); + +impl PathStringEval { + pub fn new() -> Self { + Self(std::marker::PhantomData) + } +} + +impl<'a, A: PathString> Eval<'a> for PathStringEval { + type In = A; + type Out = String; + + fn eval(&self, mustache: &Mustache, in_value: &Self::In) -> Self::Out { + mustache + .segments() + .iter() + .map(|segment| match segment { + Segment::Literal(text) => text.clone(), + Segment::Expression(parts) => in_value + .path_string(parts) + .map(|a| a.to_string()) + .unwrap_or_default(), + }) + .collect() + } +} + +pub trait Path { + fn get_path>(&self, in_value: &[S]) -> Option<&Self>; +} + +pub struct PathEval(std::marker::PhantomData); + +impl PathEval { + #[allow(unused)] + pub fn new() -> Self { + Self(std::marker::PhantomData) + } +} + +#[allow(unused)] +pub enum Exit<'a, A> { + Text(&'a str), + Value(&'a A), +} + +impl<'a, A: Path + 'a> Eval<'a> for PathEval<&'a A> { + type In = &'a A; + type Out = Vec>; + + fn eval(&'a self, mustache: &'a Mustache, in_value: &'a Self::In) -> Self::Out { + mustache + .segments() + .iter() + .filter_map(|segment| match segment { + Segment::Literal(text) => 
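+                // Literal segments pass through as borrowed text; expression segments
+                // are resolved against the input via Path::get_path.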
Some(Exit::Text(text)), + Segment::Expression(parts) => in_value.get_path(parts).map(Exit::Value), + }) + .collect::>() + } +} + +pub struct PathGraphqlEval(std::marker::PhantomData); + +impl PathGraphqlEval { + pub fn new() -> Self { + Self(std::marker::PhantomData) + } +} + +/*impl<'a, A: PathGraphql> Eval<'a> for PathGraphqlEval { + type In = A; + type Out = String; + + fn eval(&self, mustache: &Mustache, in_value: &Self::In) -> Self::Out { + mustache + .segments() + .iter() + .map(|segment| match segment { + Segment::Literal(text) => text.to_string(), + Segment::Expression(parts) => in_value.path_graphql(parts).unwrap_or_default(), + }) + .collect() + } +}*/ + +impl Mustache { + pub fn render(&self, value: &impl PathString) -> String { + PathStringEval::new().eval(self, value) + } + + /* pub fn render_graphql(&self, value: &impl PathGraphql) -> String { + PathGraphqlEval::new().eval(self, value) + }*/ +} + +/*#[cfg(test)] +mod tests { + + mod render { + use std::borrow::Cow; + + use serde_json::json; + + use crate::mustache::model::{Mustache, Segment}; + + #[test] + fn test_query_params_template() { + let s = r"/v1/templates?project-id={{value.projectId}}"; + let mustache: Mustache = Mustache::parse(s); + let ctx = json!(json!({"value": {"projectId": "123"}})); + let result = mustache.render(&ctx); + assert_eq!(result, "/v1/templates?project-id=123"); + } + + #[test] + fn test_render_mixed() { + struct DummyPath; + + impl PathString for DummyPath { + fn path_string>(&self, parts: &[T]) -> Option> { + let parts: Vec<&str> = parts.iter().map(AsRef::as_ref).collect(); + + if parts == ["foo", "bar"] { + Some(Cow::Borrowed("FOOBAR")) + } else if parts == ["baz", "qux"] { + Some(Cow::Borrowed("BAZQUX")) + } else { + None + } + } + } + + let mustache = Mustache::from(vec![ + Segment::Literal("prefix ".to_string()), + Segment::Expression(vec!["foo".to_string(), "bar".to_string()]), + Segment::Literal(" middle ".to_string()), + Segment::Expression(vec!["baz".to_string(), "qux".to_string()]), + Segment::Literal(" suffix".to_string()), + ]); + + assert_eq!( + mustache.render(&DummyPath), + "prefix FOOBAR middle BAZQUX suffix" + ); + } + + #[test] + fn test_render_with_missing_path() { + struct DummyPath; + + impl PathString for DummyPath { + fn path_string>(&self, _: &[T]) -> Option> { + None + } + } + + let mustache = Mustache::from(vec![ + Segment::Literal("prefix ".to_string()), + Segment::Expression(vec!["foo".to_string(), "bar".to_string()]), + Segment::Literal(" suffix".to_string()), + ]); + + assert_eq!(mustache.render(&DummyPath), "prefix suffix"); + } + + #[test] + fn test_json_like() { + let mustache = Mustache::parse(r#"{registered: "{{foo}}", display: "{{bar}}"}"#); + let ctx = json!({"foo": "baz", "bar": "qux"}); + let result = mustache.render(&ctx); + assert_eq!(result, r#"{registered: "baz", display: "qux"}"#); + } + + #[test] + fn test_json_like_static() { + let mustache = Mustache::parse(r#"{registered: "foo", display: "bar"}"#); + let ctx = json!({}); // Context is not used in this case + let result = mustache.render(&ctx); + assert_eq!(result, r#"{registered: "foo", display: "bar"}"#); + } + + #[test] + fn test_render_preserves_spaces() { + struct DummyPath; + + impl PathString for DummyPath { + fn path_string>(&self, parts: &[T]) -> Option> { + let parts: Vec<&str> = parts.iter().map(AsRef::as_ref).collect(); + + if parts == ["foo"] { + Some(Cow::Borrowed("bar")) + } else { + None + } + } + } + + let mustache = Mustache::from(vec![ + Segment::Literal(" ".to_string()), + 
Segment::Expression(vec!["foo".to_string()]), + Segment::Literal(" ".to_string()), + ]); + + assert_eq!(mustache.render(&DummyPath).as_str(), " bar "); + } + } + + mod render_graphql { + use crate::core::mustache::{Mustache, Segment}; + use crate::core::path::PathGraphql; + + #[test] + fn test_render_mixed() { + struct DummyPath; + + impl PathGraphql for DummyPath { + fn path_graphql>(&self, parts: &[T]) -> Option { + let parts: Vec<&str> = parts.iter().map(AsRef::as_ref).collect(); + + if parts == ["foo", "bar"] { + Some("FOOBAR".to_owned()) + } else if parts == ["baz", "qux"] { + Some("BAZQUX".to_owned()) + } else { + None + } + } + } + + let mustache = Mustache::from(vec![ + Segment::Literal("prefix ".to_string()), + Segment::Expression(vec!["foo".to_string(), "bar".to_string()]), + Segment::Literal(" middle ".to_string()), + Segment::Expression(vec!["baz".to_string(), "qux".to_string()]), + Segment::Literal(" suffix".to_string()), + ]); + + assert_eq!( + mustache.render_graphql(&DummyPath), + "prefix FOOBAR middle BAZQUX suffix" + ); + } + + #[test] + fn test_render_with_missing_path() { + struct DummyPath; + + impl PathGraphql for DummyPath { + fn path_graphql>(&self, _: &[T]) -> Option { + None + } + } + + let mustache = Mustache::from(vec![ + Segment::Literal("prefix ".to_string()), + Segment::Expression(vec!["foo".to_string(), "bar".to_string()]), + Segment::Literal(" suffix".to_string()), + ]); + + assert_eq!(mustache.render_graphql(&DummyPath), "prefix suffix"); + } + } +}*/ diff --git a/projects/ssddOnTop/src/mustache/mod.rs b/projects/ssddOnTop/src/mustache/mod.rs new file mode 100644 index 0000000..2cd3ff7 --- /dev/null +++ b/projects/ssddOnTop/src/mustache/mod.rs @@ -0,0 +1,3 @@ +mod eval; +pub mod model; +pub mod parse; diff --git a/projects/ssddOnTop/src/mustache/model.rs b/projects/ssddOnTop/src/mustache/model.rs new file mode 100644 index 0000000..af18d19 --- /dev/null +++ b/projects/ssddOnTop/src/mustache/model.rs @@ -0,0 +1,68 @@ +use std::fmt::Display; + +#[derive(Debug, Clone, PartialEq, Hash, Default)] +pub struct Mustache(Vec); + +#[derive(Debug, Clone, PartialEq, Hash)] +pub enum Segment { + Literal(String), + Expression(Vec), +} + +impl> From for Mustache { + fn from(value: A) -> Self { + Mustache(value.into_iter().collect()) + } +} + +impl Mustache { + pub fn is_const(&self) -> bool { + match self { + Mustache(segments) => { + for s in segments { + if let Segment::Expression(_) = s { + return false; + } + } + true + } + } + } + + pub fn segments(&self) -> &Vec { + &self.0 + } + + pub fn expression_segments(&self) -> Vec<&Vec> { + self.segments() + .iter() + .filter_map(|seg| match seg { + Segment::Expression(parts) => Some(parts), + _ => None, + }) + .collect() + } + + /// Checks if the mustache template contains the given expression + pub fn expression_contains(&self, expression: &str) -> bool { + self.segments() + .iter() + .any(|seg| matches!(seg, Segment::Expression(parts) if parts.iter().any(|part| part.as_str() == expression))) + } +} + +impl Display for Mustache { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let str = self + .segments() + .iter() + .map(|segment| match segment { + Segment::Literal(text) => text.clone(), + Segment::Expression(parts) => format!("{{{{{}}}}}", parts.join(".")), + }) + .collect::>() + .join(""); + + write!(f, "{}", str) + } +} diff --git a/projects/ssddOnTop/src/mustache/parse.rs b/projects/ssddOnTop/src/mustache/parse.rs new file mode 100644 index 0000000..7eadda3 --- /dev/null +++ 
b/projects/ssddOnTop/src/mustache/parse.rs @@ -0,0 +1,272 @@ +use crate::mustache::model::{Mustache, Segment}; +use nom::branch::alt; +use nom::bytes::complete::{tag, take_until}; +use nom::character::complete::char; +use nom::combinator::map; +use nom::multi::many0; +use nom::sequence::delimited; +use nom::{Finish, IResult}; + +impl Mustache { + // TODO: infallible function, no need to return Result + pub fn parse(str: &str) -> Mustache { + let result = parse_mustache(str).finish(); + match result { + Ok((_, mustache)) => mustache, + Err(_) => Mustache::from(vec![Segment::Literal(str.to_string())]), + } + } +} + +fn parse_name(input: &str) -> IResult<&str, String> { + let spaces = nom::character::complete::multispace0; + let alpha = nom::character::complete::alpha1; + let alphanumeric_or_underscore = nom::multi::many0(nom::branch::alt(( + nom::character::complete::alphanumeric1, + nom::bytes::complete::tag("_"), + ))); + + let parser = nom::sequence::tuple((spaces, alpha, alphanumeric_or_underscore, spaces)); + + nom::combinator::map(parser, |(_, a, b, _)| { + let b: String = b.into_iter().collect(); + format!("{}{}", a, b) + })(input) +} + +fn parse_expression(input: &str) -> IResult<&str, Segment> { + delimited( + tag("{{"), + map( + nom::sequence::tuple(( + nom::combinator::opt(char('.')), // Optional leading dot + nom::multi::separated_list1(char('.'), parse_name), + )), + |(_, expr_parts)| Segment::Expression(expr_parts), + ), + tag("}}"), + )(input) +} + +fn parse_segment(input: &str) -> IResult<&str, Vec> { + let expression_result = many0(alt(( + parse_expression, + map(take_until("{{"), |txt: &str| { + Segment::Literal(txt.to_string()) + }), + )))(input); + + if let Ok((remaining, segments)) = expression_result { + if remaining.is_empty() { + Ok((remaining, segments)) + } else { + let mut segments = segments; + segments.push(Segment::Literal(remaining.to_string())); + Ok(("", segments)) + } + } else { + Ok(("", vec![Segment::Literal(input.to_string())])) + } +} + +fn parse_mustache(input: &str) -> IResult<&str, Mustache> { + map(parse_segment, |segments| { + Mustache::from(segments.into_iter().filter(|seg| match seg { + Segment::Literal(s) => (!s.is_empty()) && s != "\"", + _ => true, + })) + })(input) +} + +#[cfg(test)] +mod tests { + + use pretty_assertions::assert_eq; + + use crate::mustache::model::{Mustache, Segment}; + + #[test] + fn test_to_string() { + let expectations = vec![ + r"/users/{{value.id}}/todos", + r"http://localhost:8090/{{foo.bar}}/api/{{hello.world}}/end", + r"http://localhost:{{args.port}}", + r"/users/{{value.userId}}", + r"/bar?id={{args.id}}&flag={{args.flag}}", + r"/foo?id={{value.id}}", + r"{{value.d}}", + r"/posts/{{args.id}}", + r"http://localhost:8000", + ]; + + for expected in expectations { + let mustache = Mustache::parse(expected); + + assert_eq!(expected, mustache.to_string()); + } + } + + #[test] + fn test_single_literal() { + let s = r"hello/world"; + let mustache: Mustache = Mustache::parse(s); + assert_eq!( + mustache, + Mustache::from(vec![Segment::Literal("hello/world".to_string())]) + ); + } + + #[test] + fn test_single_template() { + let s = r"{{hello.world}}"; + let mustache: Mustache = Mustache::parse(s); + assert_eq!( + mustache, + Mustache::from(vec![Segment::Expression(vec![ + "hello".to_string(), + "world".to_string(), + ])]) + ); + } + + #[test] + fn test_mixed() { + let s = r"http://localhost:8090/{{foo.bar}}/api/{{hello.world}}/end"; + let mustache: Mustache = Mustache::parse(s); + assert_eq!( + mustache, + Mustache::from(vec![ 
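+                // literal and expression segments are expected in source order: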
+ Segment::Literal("http://localhost:8090/".to_string()), + Segment::Expression(vec!["foo".to_string(), "bar".to_string()]), + Segment::Literal("/api/".to_string()), + Segment::Expression(vec!["hello".to_string(), "world".to_string()]), + Segment::Literal("/end".to_string()), + ]) + ); + } + + #[test] + fn test_with_spaces() { + let s = "{{ foo . bar }}"; + let mustache: Mustache = Mustache::parse(s); + assert_eq!( + mustache, + Mustache::from(vec![Segment::Expression(vec![ + "foo".to_string(), + "bar".to_string(), + ])]) + ); + } + + #[test] + fn test_parse_expression_with_valid_input() { + let result = Mustache::parse("{{ foo.bar }} extra"); + let expected = Mustache::from(vec![ + Segment::Expression(vec!["foo".to_string(), "bar".to_string()]), + Segment::Literal(" extra".to_string()), + ]); + assert_eq!(result, expected); + } + + #[test] + fn test_parse_expression_with_invalid_input() { + let result = Mustache::parse("foo.bar }}"); + let expected = Mustache::from(vec![Segment::Literal("foo.bar }}".to_string())]); + assert_eq!(result, expected); + } + + #[test] + fn test_parse_segments_mixed() { + let result = Mustache::parse("prefix {{foo.bar}} middle {{baz.qux}} suffix"); + let expected = Mustache::from(vec![ + Segment::Literal("prefix ".to_string()), + Segment::Expression(vec!["foo".to_string(), "bar".to_string()]), + Segment::Literal(" middle ".to_string()), + Segment::Expression(vec!["baz".to_string(), "qux".to_string()]), + Segment::Literal(" suffix".to_string()), + ]); + assert_eq!(result, expected); + } + + #[test] + fn test_parse_segments_only_literal() { + let result = Mustache::parse("just a string"); + let expected = Mustache::from(vec![Segment::Literal("just a string".to_string())]); + assert_eq!(result, expected); + } + + #[test] + fn test_parse_segments_only_expression() { + let result = Mustache::parse("{{foo.bar}}"); + let expected = Mustache::from(vec![Segment::Expression(vec![ + "foo".to_string(), + "bar".to_string(), + ])]); + assert_eq!(result, expected); + } + + #[test] + fn test_unfinished_expression() { + let s = r"{{hello.world"; + let mustache: Mustache = Mustache::parse(s); + assert_eq!( + mustache, + Mustache::from(vec![Segment::Literal("{{hello.world".to_string())]) + ); + } + + #[test] + fn test_new_number() { + let mustache = Mustache::parse("123"); + assert_eq!( + mustache, + Mustache::from(vec![Segment::Literal("123".to_string())]) + ); + } + + #[test] + fn parse_env_name() { + let result = Mustache::parse("{{env.FOO}}"); + assert_eq!( + result, + Mustache::from(vec![Segment::Expression(vec![ + "env".to_string(), + "FOO".to_string(), + ])]) + ); + } + + #[test] + fn parse_env_with_underscores() { + let result = Mustache::parse("{{env.FOO_BAR}}"); + assert_eq!( + result, + Mustache::from(vec![Segment::Expression(vec![ + "env".to_string(), + "FOO_BAR".to_string(), + ])]) + ); + } + + #[test] + fn single_curly_brackets() { + let result = Mustache::parse("test:{SHA}string"); + assert_eq!( + result, + Mustache::from(vec![Segment::Literal("test:{SHA}string".to_string())]) + ); + } + + #[test] + fn test_optional_dot_expression() { + let s = r"{{.foo.bar}}"; + let mustache: Mustache = Mustache::parse(s); + assert_eq!( + mustache, + Mustache::from(vec![Segment::Expression(vec![ + "foo".to_string(), + "bar".to_string(), + ])]) + ); + } +} diff --git a/projects/ssddOnTop/src/path.rs b/projects/ssddOnTop/src/path.rs new file mode 100644 index 0000000..f930be6 --- /dev/null +++ b/projects/ssddOnTop/src/path.rs @@ -0,0 +1,543 @@ +use std::borrow::Cow; + +use 
crate::ir::eval_ctx::EvalContext;
+use crate::json::JsonLike;
+use crate::value::Value;
+use serde_json::json;
+
+///
+/// The path module provides a trait for accessing values from a JSON-like
+/// structure.
+
+///
+/// The PathString trait provides a method for accessing values from a JSON-like
+/// structure. The returned value is encoded as a plain string.
+/// This is typically used in evaluating mustache templates.
+pub trait PathString {
+    fn path_string<'a, T: AsRef<str>>(&'a self, path: &'a [T]) -> Option<Cow<'a, str>>;
+}
+
+/// The PathValue trait provides a method for accessing values from a JSON-like
+/// structure; the returned value is wrapped in the ValueString enum, delegating
+/// encoding to the caller of this method.
+pub trait PathValue {
+    fn raw_value<'a, T: AsRef<str>>(&'a self, path: &[T]) -> Option<ValueString<'a>>;
+}
+
+///
+/// The PathGraphql trait provides a method for accessing values from a
+/// JSON-like structure. The returned value is encoded as a GraphQL Value.
+pub trait PathGraphql {
+    fn path_graphql<T: AsRef<str>>(&self, path: &[T]) -> Option<String>;
+}
+
+impl PathString for serde_json::Value {
+    fn path_string<'a, T: AsRef<str>>(&'a self, path: &'a [T]) -> Option<Cow<'a, str>> {
+        self.get_path(path).map(move |a| match a {
+            serde_json::Value::String(s) => Cow::Borrowed(s.as_str()),
+            _ => Cow::Owned(a.to_string()),
+        })
+    }
+}
+
+fn convert_value(value: Cow<'_, Value>) -> Option<Cow<'_, str>> {
+    // let value = value.serde();
+    match value {
+        Cow::Owned(val) => {
+            let val = val.into_serde();
+            match val {
+                serde_json::Value::String(s) => Some(Cow::Owned(s)),
+                serde_json::Value::Number(n) => Some(Cow::Owned(n.to_string())),
+                serde_json::Value::Bool(b) => Some(Cow::Owned(b.to_string())),
+                serde_json::Value::Object(map) => Some(json!(map).to_string().into()),
+                serde_json::Value::Array(list) => Some(json!(list).to_string().into()),
+                _ => None,
+            }
+        }
+        Cow::Borrowed(val) => {
+            let val = val.serde();
+            match val {
+                serde_json::Value::String(s) => Some(Cow::Borrowed(s)),
+                serde_json::Value::Number(n) => Some(Cow::Owned(n.to_string())),
+                serde_json::Value::Bool(b) => Some(Cow::Owned(b.to_string())),
+                serde_json::Value::Object(map) => Some(json!(map).to_string().into()),
+                serde_json::Value::Array(list) => Some(json!(list).to_string().into()),
+                _ => None,
+            }
+        }
+        _ => None,
+    }
+}
+
+///
+/// An optimized version of async_graphql::Value that handles strings in a more
+/// efficient manner.
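+// ValueString keeps borrowed strings (e.g. headers or vars) separate from structured
+// Values, so the PathString and PathGraphql impls below can pick the right encoding.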
+#[derive(Debug)] +pub enum ValueString<'a> { + Value(Cow<'a, Value>), + String(Cow<'a, str>), +} + +impl<'a> EvalContext<'a> { + fn to_raw_value>(&self, path: &[T]) -> Option> { + let ctx = self; + + if path.is_empty() { + println!("none0"); + return None; + } + + if path.len() == 1 { + return match path[0].as_ref() { + "value" => Some(ValueString::Value(ctx.path_value(&[] as &[T])?)), + "args" => Some(ValueString::Value(ctx.path_arg::<&str>(&[])?)), + /* "vars" => Some(ValueString::String(Cow::Owned( + json!(ctx.vars()).to_string(), + ))),*/ + _ => { + println!("none"); + None + } + }; + } + + path.split_first() + .and_then(move |(head, tail)| match head.as_ref() { + "value" => Some(ValueString::Value(ctx.path_value(tail)?)), + "args" => Some(ValueString::Value(ctx.path_arg(tail)?)), + /* "headers" => Some(ValueString::String(Cow::Borrowed( + ctx.header(tail[0].as_ref())?, + ))), + "vars" => Some(ValueString::String(Cow::Borrowed( + ctx.var(tail[0].as_ref())?, + ))),*/ + // "env" => Some(ValueString::String(ctx.env_var(tail[0].as_ref())?)), + _ => { + println!("none1"); + None + } + }) + } +} + +impl<'a> PathValue for EvalContext<'a> { + fn raw_value<'b, T: AsRef>(&'b self, path: &[T]) -> Option> { + self.to_raw_value(path) + } +} + +impl<'a> PathString for EvalContext<'a> { + fn path_string>(&self, path: &[T]) -> Option> { + self.to_raw_value(path).and_then(|value| match value { + ValueString::String(env) => Some(env), + ValueString::Value(value) => convert_value(value), + }) + } +} + +impl<'a> PathGraphql for EvalContext<'a> { + fn path_graphql>(&self, path: &[T]) -> Option { + if path.len() < 2 { + return None; + } + + self.to_raw_value(path).map(|value| match value { + ValueString::Value(val) => val.to_string(), + ValueString::String(val) => format!(r#""{val}""#), + }) + } +} + +/*#[cfg(test)] +mod tests { + + mod evaluation_context { + use std::borrow::Cow; + use std::collections::BTreeMap; + use std::sync::Arc; + + use async_graphql_value::{ConstValue as Value, Name, Number}; + use hyper::header::HeaderValue; + use hyper::HeaderMap; + use indexmap::IndexMap; + use once_cell::sync::Lazy; + + use crate::core::http::RequestContext; + use crate::core::ir::{EvalContext, ResolverContextLike, SelectionField}; + use crate::core::path::{PathGraphql, PathString, PathValue, ValueString}; + use crate::core::EnvIO; + use crate::path::ValueString; + + struct Env { + env: BTreeMap, + } + + impl EnvIO for Env { + fn get(&self, key: &str) -> Option> { + self.env.get(key).map(Cow::from) + } + } + + impl Env { + pub fn init(map: BTreeMap) -> Self { + Self { env: map } + } + } + + static TEST_VALUES: Lazy = Lazy::new(|| { + let mut root = IndexMap::new(); + let mut nested = IndexMap::new(); + + nested.insert( + Name::new("existing"), + Value::String("nested-test".to_owned()), + ); + root.insert(Name::new("bool"), Value::Boolean(true)); + root.insert(Name::new("nested"), Value::Object(nested)); + root.insert(Name::new("number"), Value::Number(Number::from(2))); + root.insert(Name::new("str"), Value::String("str-test".to_owned())); + + Value::Object(root) + }); + + static TEST_ARGS: Lazy> = Lazy::new(|| { + let mut root = IndexMap::new(); + let mut nested = IndexMap::new(); + + nested.insert( + Name::new("existing"), + Value::String("nested-test".to_owned()), + ); + + root.insert(Name::new("nested"), Value::Object(nested)); + root.insert(Name::new("root"), Value::String("root-test".to_owned())); + + root + }); + + static TEST_HEADERS: Lazy = Lazy::new(|| { + let mut map = HeaderMap::new(); + + 
map.insert("x-existing", HeaderValue::from_static("header")); + + map + }); + + static TEST_VARS: Lazy> = Lazy::new(|| { + let mut map = BTreeMap::new(); + + map.insert("existing".to_owned(), "var".to_owned()); + + map + }); + + static TEST_ENV_VARS: Lazy> = Lazy::new(|| { + let mut map = BTreeMap::new(); + + map.insert("existing".to_owned(), "env".to_owned()); + + map + }); + + #[derive(Clone)] + struct MockGraphqlContext; + + impl ResolverContextLike for MockGraphqlContext { + fn value(&self) -> Option<&Value> { + Some(&TEST_VALUES) + } + + fn args(&self) -> Option<&IndexMap> { + Some(&TEST_ARGS) + } + + fn field(&self) -> Option { + None + } + + fn is_query(&self) -> bool { + false + } + + fn add_error(&self, _: async_graphql::ServerError) {} + } + + static REQ_CTX: Lazy = Lazy::new(|| { + let mut req_ctx = RequestContext::default().allowed_headers(TEST_HEADERS.clone()); + + req_ctx.server.vars = TEST_VARS.clone(); + req_ctx.runtime.env = Arc::new(Env::init(TEST_ENV_VARS.clone())); + + req_ctx + }); + + static EVAL_CTX: Lazy> = + Lazy::new(|| EvalContext::new(&REQ_CTX, &MockGraphqlContext)); + + #[test] + fn path_to_value() { + let mut map = IndexMap::default(); + map.insert( + async_graphql_value::Name::new("number"), + async_graphql::Value::Number(2.into()), + ); + map.insert( + async_graphql_value::Name::new("str"), + async_graphql::Value::String("str-test".into()), + ); + map.insert( + async_graphql_value::Name::new("bool"), + async_graphql::Value::Boolean(true), + ); + let mut nested_map = IndexMap::default(); + nested_map.insert( + async_graphql_value::Name::new("existing"), + async_graphql::Value::String("nested-test".into()), + ); + map.insert( + async_graphql_value::Name::new("nested"), + async_graphql::Value::Object(nested_map), + ); + + // value + assert_eq!( + EVAL_CTX.raw_value(&["value", "bool"]), + Some(ValueString::Value(Cow::Borrowed( + &async_graphql::Value::Boolean(true) + ))) + ); + assert_eq!( + EVAL_CTX.raw_value(&["value", "number"]), + Some(ValueString::Value(Cow::Borrowed( + &async_graphql::Value::Number(2.into()) + ))) + ); + assert_eq!( + EVAL_CTX.raw_value(&["value", "str"]), + Some(ValueString::Value(Cow::Borrowed( + &async_graphql::Value::String("str-test".into()) + ))) + ); + assert_eq!(EVAL_CTX.raw_value(&["value", "missing"]), None); + assert_eq!(EVAL_CTX.raw_value(&["value", "nested", "missing"]), None); + assert_eq!( + EVAL_CTX.raw_value(&["value"]), + Some(ValueString::Value(Cow::Borrowed( + &async_graphql::Value::Object(map.clone()), + ))) + ); + + // args + assert_eq!( + EVAL_CTX.raw_value(&["args", "root"]), + Some(ValueString::Value(Cow::Borrowed( + &async_graphql::Value::String("root-test".into()), + ))) + ); + + let mut expected = IndexMap::new(); + expected.insert( + async_graphql_value::Name::new("existing"), + async_graphql::Value::String("nested-test".into()), + ); + assert_eq!( + EVAL_CTX.raw_value(&["args", "nested"]), + Some(ValueString::Value(Cow::Borrowed( + &async_graphql::Value::Object(expected) + ))) + ); + + assert_eq!(EVAL_CTX.raw_value(&["args", "missing"]), None); + assert_eq!(EVAL_CTX.raw_value(&["args", "nested", "missing"]), None); + + let mut expected = IndexMap::new(); + let mut nested_map = IndexMap::new(); + nested_map.insert( + async_graphql_value::Name::new("existing"), + async_graphql::Value::String("nested-test".into()), + ); + expected.insert( + async_graphql_value::Name::new("nested"), + async_graphql::Value::Object(nested_map), + ); + expected.insert( + async_graphql_value::Name::new("root"), + 
async_graphql::Value::String("root-test".into()), + ); + assert_eq!( + EVAL_CTX.raw_value(&["args"]), + Some(ValueString::Value(Cow::Borrowed( + &async_graphql::Value::Object(expected) + ))) + ); + + // headers + assert_eq!( + EVAL_CTX.raw_value(&["headers", "x-existing"]), + Some(ValueString::String(Cow::Borrowed("header"))) + ); + assert_eq!(EVAL_CTX.raw_value(&["headers", "x-missing"]), None); + + // vars + assert_eq!( + EVAL_CTX.raw_value(&["vars", "existing"]), + Some(ValueString::String(Cow::Borrowed("var"))) + ); + assert_eq!(EVAL_CTX.raw_value(&["vars", "missing"]), None); + assert_eq!( + EVAL_CTX.raw_value(&["vars"]), + Some(ValueString::String(Cow::Borrowed(r#"{"existing":"var"}"#))) + ); + + // envs + assert_eq!( + EVAL_CTX.raw_value(&["env", "existing"]), + Some(ValueString::String(Cow::Borrowed("env"))) + ); + assert_eq!(EVAL_CTX.raw_value(&["env", "x-missing"]), None); + + // other value types + assert_eq!(EVAL_CTX.raw_value(&["foo", "key"]), None); + assert_eq!(EVAL_CTX.raw_value(&["bar", "key"]), None); + assert_eq!(EVAL_CTX.raw_value(&["baz", "key"]), None); + } + + #[test] + fn path_to_string() { + // value + assert_eq!( + EVAL_CTX.path_string(&["value", "bool"]), + Some(Cow::Borrowed("true")) + ); + assert_eq!( + EVAL_CTX.path_string(&["value", "number"]), + Some(Cow::Borrowed("2")) + ); + assert_eq!( + EVAL_CTX.path_string(&["value", "str"]), + Some(Cow::Borrowed("str-test")) + ); + assert_eq!( + EVAL_CTX.path_string(&["value", "nested"]), + Some(Cow::Borrowed("{\"existing\":\"nested-test\"}")) + ); + assert_eq!(EVAL_CTX.path_string(&["value", "missing"]), None); + assert_eq!(EVAL_CTX.path_string(&["value", "nested", "missing"]), None); + assert_eq!( + EVAL_CTX.path_string(&["value"]), + Some(Cow::Borrowed( + r#"{"bool":true,"nested":{"existing":"nested-test"},"number":2,"str":"str-test"}"# + )) + ); + + // args + assert_eq!( + EVAL_CTX.path_string(&["args", "root"]), + Some(Cow::Borrowed("root-test")) + ); + assert_eq!( + EVAL_CTX.path_string(&["args", "nested"]), + Some(Cow::Borrowed("{\"existing\":\"nested-test\"}")) + ); + assert_eq!(EVAL_CTX.path_string(&["args", "missing"]), None); + assert_eq!(EVAL_CTX.path_string(&["args", "nested", "missing"]), None); + assert_eq!( + EVAL_CTX.path_string(&["args"]), + Some(Cow::Borrowed( + r#"{"nested":{"existing":"nested-test"},"root":"root-test"}"# + )) + ); + + // headers + assert_eq!( + EVAL_CTX.path_string(&["headers", "x-existing"]), + Some(Cow::Borrowed("header")) + ); + assert_eq!(EVAL_CTX.path_string(&["headers", "x-missing"]), None); + + // vars + assert_eq!( + EVAL_CTX.path_string(&["vars", "existing"]), + Some(Cow::Borrowed("var")) + ); + assert_eq!(EVAL_CTX.path_string(&["vars", "missing"]), None); + assert_eq!( + EVAL_CTX.path_string(&["vars"]), + Some(Cow::Borrowed(r#"{"existing":"var"}"#)) + ); + + // envs + assert_eq!( + EVAL_CTX.path_string(&["env", "existing"]), + Some(Cow::Borrowed("env")) + ); + assert_eq!(EVAL_CTX.path_string(&["env", "x-missing"]), None); + + // other value types + assert_eq!(EVAL_CTX.path_string(&["foo", "key"]), None); + assert_eq!(EVAL_CTX.path_string(&["bar", "key"]), None); + assert_eq!(EVAL_CTX.path_string(&["baz", "key"]), None); + } + + #[test] + fn path_to_graphql_string() { + // value + assert_eq!( + EVAL_CTX.path_graphql(&["value", "bool"]), + Some("true".to_owned()) + ); + assert_eq!( + EVAL_CTX.path_graphql(&["value", "number"]), + Some("2".to_owned()) + ); + assert_eq!( + EVAL_CTX.path_graphql(&["value", "str"]), + Some("\"str-test\"".to_owned()) + ); + assert_eq!( + 
EVAL_CTX.path_graphql(&["value", "nested"]), + Some("{existing: \"nested-test\"}".to_owned()) + ); + assert_eq!(EVAL_CTX.path_graphql(&["value", "missing"]), None); + assert_eq!(EVAL_CTX.path_graphql(&["value", "nested", "missing"]), None); + + // args + assert_eq!( + EVAL_CTX.path_graphql(&["args", "root"]), + Some("\"root-test\"".to_owned()) + ); + assert_eq!( + EVAL_CTX.path_graphql(&["args", "nested"]), + Some("{existing: \"nested-test\"}".to_owned()) + ); + assert_eq!(EVAL_CTX.path_graphql(&["args", "missing"]), None); + assert_eq!(EVAL_CTX.path_graphql(&["args", "nested", "missing"]), None); + + // headers + assert_eq!( + EVAL_CTX.path_graphql(&["headers", "x-existing"]), + Some("\"header\"".to_owned()) + ); + assert_eq!(EVAL_CTX.path_graphql(&["headers", "x-missing"]), None); + + // vars + assert_eq!( + EVAL_CTX.path_graphql(&["vars", "existing"]), + Some("\"var\"".to_owned()) + ); + assert_eq!(EVAL_CTX.path_graphql(&["vars", "missing"]), None); + + // envs + assert_eq!( + EVAL_CTX.path_graphql(&["env", "existing"]), + Some("\"env\"".to_owned()) + ); + assert_eq!(EVAL_CTX.path_graphql(&["env", "x-missing"]), None); + + // other value types + assert_eq!(EVAL_CTX.path_graphql(&["foo", "key"]), None); + assert_eq!(EVAL_CTX.path_graphql(&["bar", "key"]), None); + assert_eq!(EVAL_CTX.path_graphql(&["baz", "key"]), None); + } + } +}*/ diff --git a/projects/ssddOnTop/src/request_context.rs b/projects/ssddOnTop/src/request_context.rs new file mode 100644 index 0000000..8b24247 --- /dev/null +++ b/projects/ssddOnTop/src/request_context.rs @@ -0,0 +1,105 @@ +use crate::app_ctx::AppCtx; +use crate::blueprint::{Server, Upstream}; +use crate::ir::IoId; +use crate::target_runtime::cache::InMemoryCache; +use crate::target_runtime::TargetRuntime; +use crate::value::Value; +use anyhow::Error; +use derive_setters::Setters; +use std::num::NonZeroU64; +use std::sync::{Arc, Mutex}; + +#[derive(Clone)] +pub struct CacheErr(String); + +impl From for CacheErr { + fn from(value: Error) -> Self { + CacheErr(value.to_string()) + } +} + +impl From for anyhow::Error { + fn from(value: CacheErr) -> Self { + anyhow::Error::msg(value.0) + } +} + +#[derive(Setters)] +pub struct RequestContext { + pub server: Server, + pub upstream: Upstream, + pub min_max_age: Arc>>, + pub cache_public: Arc>>, + pub runtime: TargetRuntime, + pub cache: InMemoryCache, + // pub cache: Dedupe>, +} + +impl RequestContext { + pub fn new(target_runtime: TargetRuntime) -> RequestContext { + RequestContext { + server: Default::default(), + upstream: Default::default(), + min_max_age: Arc::new(Mutex::new(None)), + cache_public: Arc::new(Mutex::new(None)), + runtime: target_runtime, + // cache: Dedupe::new(1, true), + cache: InMemoryCache::new(), + } + } + fn set_min_max_age_conc(&self, min_max_age: i32) { + *self.min_max_age.lock().unwrap() = Some(min_max_age); + } + pub fn get_min_max_age(&self) -> Option { + *self.min_max_age.lock().unwrap() + } + + pub fn set_cache_public_false(&self) { + *self.cache_public.lock().unwrap() = Some(false); + } + + pub fn is_cache_public(&self) -> Option { + *self.cache_public.lock().unwrap() + } + + pub fn set_min_max_age(&self, max_age: i32) { + let min_max_age_lock = self.get_min_max_age(); + match min_max_age_lock { + Some(min_max_age) if max_age < min_max_age => { + self.set_min_max_age_conc(max_age); + } + None => { + self.set_min_max_age_conc(max_age); + } + _ => {} + } + } + + pub async fn cache_get(&self, key: &IoId) -> Result, anyhow::Error> { + self.runtime.cache.get(key).await + } + + 
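+    // Usage sketch (hypothetical caller, not part of this change): resolvers would
+    // try `cache_get(&key)` first and, on a miss, compute the value and store it via
+    // `cache_insert(key, value, NonZeroU64::new(ttl_ms).unwrap())`. The TTL is taken
+    // as milliseconds by InMemoryCache::set in target_runtime.rs.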
#[allow(clippy::too_many_arguments)] + pub async fn cache_insert( + &self, + key: IoId, + value: Value, + ttl: NonZeroU64, + ) -> Result<(), anyhow::Error> { + self.runtime.cache.set(key, value, ttl).await + } +} + +impl From<&AppCtx> for RequestContext { + fn from(app_ctx: &AppCtx) -> Self { + Self { + server: app_ctx.blueprint.server.clone(), + upstream: app_ctx.blueprint.upstream.clone(), + min_max_age: Arc::new(Mutex::new(None)), + cache_public: Arc::new(Mutex::new(None)), + runtime: app_ctx.runtime.clone(), + // cache: Dedupe::new(1, true), + cache: InMemoryCache::new(), + } + } +} diff --git a/projects/ssddOnTop/src/run/http1.rs b/projects/ssddOnTop/src/run/http1.rs new file mode 100644 index 0000000..6959b9f --- /dev/null +++ b/projects/ssddOnTop/src/run/http1.rs @@ -0,0 +1,43 @@ +use crate::app_ctx::AppCtx; +use crate::http::request_handler::handle_request; +use hyper::service::service_fn; +use tokio::net::TcpListener; + +pub async fn start(app_ctx: AppCtx) -> anyhow::Result<()> { + let addr = app_ctx.blueprint.server.addr(); + let listener = TcpListener::bind(addr).await?; + + tracing::info!("Listening on: http://{}", addr); + loop { + let app_ctx = app_ctx.clone(); + + let stream_result = listener.accept().await; + match stream_result { + Ok((stream, _)) => { + let io = hyper_util::rt::TokioIo::new(stream); + tokio::spawn(async move { + let app_ctx = app_ctx.clone(); + + let server = hyper::server::conn::http1::Builder::new() + .serve_connection( + io, + service_fn(move |req| { + let app_ctx = app_ctx.clone(); + + async move { + let req = + crate::http::request::Request::from_hyper(req).await?; + handle_request(req, app_ctx).await + } + }), + ) + .await; + if let Err(e) = server { + tracing::error!("An error occurred while handling a request: {e}"); + } + }); + } + Err(e) => tracing::error!("An error occurred while handling request: {e}"), + } + } +} diff --git a/projects/ssddOnTop/src/run/mod.rs b/projects/ssddOnTop/src/run/mod.rs new file mode 100644 index 0000000..e57406b --- /dev/null +++ b/projects/ssddOnTop/src/run/mod.rs @@ -0,0 +1,4 @@ +mod http1; +mod run; + +pub use run::*; diff --git a/projects/ssddOnTop/src/run/run.rs b/projects/ssddOnTop/src/run/run.rs new file mode 100644 index 0000000..e76d58a --- /dev/null +++ b/projects/ssddOnTop/src/run/run.rs @@ -0,0 +1,23 @@ +use crate::app_ctx::AppCtx; +use crate::blueprint::Blueprint; +use crate::config::ConfigReader; +use crate::run; +use crate::target_runtime::TargetRuntime; +use std::sync::Arc; + +pub async fn run() -> anyhow::Result<()> { + let config_reader = ConfigReader::init(); + let path = std::env::args().collect::>(); + let path = path.get(1).cloned().unwrap_or({ + let root = env!("CARGO_MANIFEST_DIR"); + format!("{}/schema/schema.graphql", root) + }); + + let config = config_reader.read(path)?; + + let blueprint = Blueprint::try_from(&config)?; + let rt = TargetRuntime::new(&blueprint.upstream); + let app_ctx = AppCtx::new(rt, Arc::new(blueprint)); + run::http1::start(app_ctx).await?; + Ok(()) +} diff --git a/projects/ssddOnTop/src/target_runtime.rs b/projects/ssddOnTop/src/target_runtime.rs new file mode 100644 index 0000000..b6c6ea7 --- /dev/null +++ b/projects/ssddOnTop/src/target_runtime.rs @@ -0,0 +1,111 @@ +use crate::blueprint::Upstream; +use crate::ir::IoId; +use crate::target_runtime::http::NativeHttp; +use crate::value::Value; +use std::sync::Arc; + +#[derive(Clone)] +pub struct TargetRuntime { + /// HTTP client for making standard HTTP requests. 
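+    // Both fields are Arc'd, so cloning a TargetRuntime per request is cheap.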
+    pub http: Arc<NativeHttp>,
+    pub cache: Arc<cache::InMemoryCache<IoId, Value>>,
+}
+
+impl TargetRuntime {
+    pub fn new(upstream: &Upstream) -> Self {
+        let http = Arc::new(NativeHttp::init(upstream));
+        let cache = Arc::new(cache::InMemoryCache::new());
+        Self { http, cache }
+    }
+}
+
+pub mod cache {
+    use std::hash::Hash;
+    use std::num::NonZeroU64;
+    use std::sync::{Arc, RwLock};
+    use std::time::Duration;
+
+    use ttl_cache::TtlCache;
+
+    pub struct InMemoryCache<K: Hash + Eq, V> {
+        data: Arc<RwLock<TtlCache<K, V>>>,
+    }
+
+    const CACHE_CAPACITY: usize = 100000;
+
+    impl<K: Hash + Eq, V> Default for InMemoryCache<K, V> {
+        fn default() -> Self {
+            Self::new()
+        }
+    }
+
+    impl<K: Hash + Eq, V> InMemoryCache<K, V> {
+        pub fn new() -> Self {
+            InMemoryCache {
+                data: Arc::new(RwLock::new(TtlCache::new(CACHE_CAPACITY))),
+            }
+        }
+    }
+
+    impl<K: Hash + Eq, V: Clone> InMemoryCache<K, V> {
+        pub async fn set<'a>(&'a self, key: K, value: V, ttl: NonZeroU64) -> anyhow::Result<()> {
+            let ttl = Duration::from_millis(ttl.get());
+            self.data.write().unwrap().insert(key, value, ttl);
+            Ok(())
+        }
+
+        pub async fn get<'a>(&'a self, key: &'a K) -> anyhow::Result<Option<V>> {
+            let val = self.data.read().unwrap().get(key).cloned();
+            Ok(val)
+        }
+    }
+}
+
+mod http {
+    use crate::blueprint::Upstream;
+    use crate::cache::HttpCacheManager;
+    use crate::http::response::Response;
+    use anyhow::Result;
+    use bytes::Bytes;
+    use http_cache_reqwest::{Cache, CacheMode, HttpCache, HttpCacheOptions};
+    use reqwest::Client;
+    use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
+
+    pub struct NativeHttp {
+        client: ClientWithMiddleware,
+    }
+
+    impl NativeHttp {
+        pub fn init(upstream: &Upstream) -> Self {
+            let mut client = ClientBuilder::new(Client::new());
+
+            client = client.with(Cache(HttpCache {
+                mode: CacheMode::Default,
+                manager: HttpCacheManager::new(upstream.http_cache),
+                options: HttpCacheOptions::default(),
+            }));
+
+            Self {
+                client: client.build(),
+            }
+        }
+        pub async fn execute(&self, request: reqwest::Request) -> Result<Response<Bytes>> {
+            tracing::info!(
+                "{} {} {:?}",
+                request.method(),
+                request.url(),
+                request.version()
+            );
+            tracing::debug!("request: {:?}", request);
+            let response = self.client.execute(request).await;
+            tracing::debug!("response: {:?}", response);
+
+            Response::from_reqwest(
+                response?
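+                    // Non-2xx responses become errors; without_url() strips the request
+                    // URL (and any sensitive query params) from the error message.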
+ .error_for_status() + .map_err(|err| err.without_url())?, + ) + .await + } + } +} diff --git a/projects/ssddOnTop/src/value/mod.rs b/projects/ssddOnTop/src/value/mod.rs new file mode 100644 index 0000000..a2c21a2 --- /dev/null +++ b/projects/ssddOnTop/src/value/mod.rs @@ -0,0 +1,3 @@ +mod value; + +pub use value::*; diff --git a/projects/ssddOnTop/src/value/value.rs b/projects/ssddOnTop/src/value/value.rs new file mode 100644 index 0000000..a4528f0 --- /dev/null +++ b/projects/ssddOnTop/src/value/value.rs @@ -0,0 +1,47 @@ +use derive_getters::Getters; +use std::fmt::{Display, Formatter}; + +#[derive(Getters, Debug, Clone, PartialEq, Eq, Hash)] +pub struct Value { + serde: serde_json::Value, + // borrowed: serde_json_borrow::Value<'static>, +} + +impl Display for Value { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.serde) + } +} + +impl Value { + pub fn new(serde: serde_json::Value) -> Self { + // let borrowed = extend_lifetime(serde_json_borrow::Value::from(&serde)); + Self { + serde, + // borrowed, + } + } + pub fn into_serde(self) -> serde_json::Value { + self.serde + } +} + +fn extend_lifetime<'b>(r: serde_json_borrow::Value<'b>) -> serde_json_borrow::Value<'static> { + unsafe { + std::mem::transmute::, serde_json_borrow::Value<'static>>(r) + } +} + +#[cfg(test)] +mod test { + use super::*; + use serde_json::json; + + #[test] + fn test_value() { + let val = json!({"key": "value"}); + let value = Value::new(val.clone()); + assert_eq!(value.serde(), &val); + // assert_eq!(value.borrowed(), &serde_json_borrow::Value::from(&val)); + } +} \ No newline at end of file
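// A minimal usage sketch tying the pieces above together (not part of the diff; it
// assumes placement as a crate-internal test, e.g. in src/mustache/eval.rs, since the
// `mustache` and `value` modules are private). It relies only on items introduced in
// this change: Mustache::parse, the PathString impl for serde_json::Value, and Value::new.
#[cfg(test)]
mod usage_sketch {
    use serde_json::json;

    use crate::mustache::model::Mustache;
    use crate::value::Value;

    #[test]
    fn render_and_wrap() {
        // {{args.id}} is resolved through the PathString impl for serde_json::Value.
        let template = Mustache::parse("/users/{{args.id}}/posts");
        let ctx = json!({"args": {"id": 42}});
        assert_eq!(template.render(&ctx), "/users/42/posts");

        // Value is a thin wrapper over serde_json::Value; Display delegates to it.
        let v = Value::new(json!({"ok": true}));
        assert_eq!(v.to_string(), r#"{"ok":true}"#);
    }
}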