diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ad57ad27eb..004dad0016 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -401,7 +401,7 @@ jobs: - name: NPM Publish if: (startsWith(github.event.head_commit.message, 'feat') || startsWith(github.event.head_commit.message, 'fix')) && (github.event_name == 'push' && github.ref == 'refs/heads/main') - uses: JS-DevTools/npm-publish@v3 + uses: JS-DevTools/npm-publish@v3 with: token: ${{ secrets.NPM_TOKEN }} package: npm/@tailcallhq/core-${{matrix.build}} @@ -506,11 +506,11 @@ jobs: APP_VERSION: ${{needs.draft_release.outputs.create_release_name }} run: | cd npm - npm run gen-root -- --version ${{ env.APP_VERSION }} + npm run gen-root -- --version ${{ env.APP_VERSION }} --name @tailcallhq/tailcall - name: Setup .npmrc file to publish to npm run: echo "//registry.npmjs.org/:_authToken=$NODE_AUTH_TOKEN" > ~/.npmrc - name: Publish packages - uses: JS-DevTools/npm-publish@v3 + uses: JS-DevTools/npm-publish@v3 with: token: ${{ secrets.NPM_TOKEN }} package: npm/@tailcallhq/tailcall diff --git a/.github/workflows/nginx-benchmark.yml b/.github/workflows/nginx-benchmark.yml index 939370dfb1..bb108e365e 100644 --- a/.github/workflows/nginx-benchmark.yml +++ b/.github/workflows/nginx-benchmark.yml @@ -31,7 +31,8 @@ jobs: - name: Run Tailcall run: | - TAILCALL_LOG_LEVEL=error ./target/release/tailcall start ci-benchmark/nginx-benchmark.graphql & + TAILCALL_LOG_LEVEL=error ./target/release/tailcall start ci-benchmark/nginx-benchmark.graphql > tailcall.log 2>&1 & + echo $! > tailcall_pid.txt - name: Install Nginx run: | @@ -59,7 +60,19 @@ - id: run_wrk name: Run Wrk working-directory: ci-benchmark run: | - wrk -d 30 -t 4 -c 100 -s wrk.lua http://localhost:8000/graphql > wrk-output.txt + set -o pipefail && wrk -d 30 -t 4 -c 100 -s wrk.lua http://localhost:8000/graphql | tee wrk-output.txt + + - id: check_tailcall + name: Check Tailcall Status + run: | + tailcall_pid=$(cat tailcall_pid.txt) + if ! 
kill -0 $tailcall_pid > /dev/null 2>&1; then + echo "Tailcall process has crashed. Log output:" >&2 + cat tailcall.log >&2 + exit 1 + else + echo "Tailcall process is still running" + fi - id: convert_wrk_output_markdown name: Convert Output to Markdown diff --git a/Cargo.lock b/Cargo.lock index 71aeff98be..0c1daf68b6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14,9 +14,9 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.21.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" dependencies = [ "gimli", ] @@ -113,9 +113,9 @@ dependencies = [ [[package]] name = "anstyle-query" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" +checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" dependencies = [ "windows-sys 0.52.0", ] @@ -195,16 +195,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" dependencies = [ "concurrent-queue", - "event-listener-strategy 0.5.2", + "event-listener-strategy", "futures-core", "pin-project-lite", ] [[package]] name = "async-executor" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b10202063978b3351199d68f8b22c4e47e4b1b822f8d43fd862d5ea8c006b29a" +checksum = "c8828ec6e544c02b0d6691d21ed9f9218d0384a82542855073c2a3f58304aaf0" dependencies = [ "async-task", "concurrent-queue", @@ -221,8 +221,8 @@ checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ "async-channel 2.3.1", "async-executor", - "async-io 2.3.2", - "async-lock 3.3.0", + "async-io 2.3.3", + "async-lock 3.4.0", "blocking", 
"futures-lite 2.3.0", "once_cell", @@ -363,17 +363,17 @@ dependencies = [ [[package]] name = "async-io" -version = "2.3.2" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcccb0f599cfa2f8ace422d3555572f47424da5648a4382a9dd0310ff8210884" +checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" dependencies = [ - "async-lock 3.3.0", + "async-lock 3.4.0", "cfg-if", "concurrent-queue", "futures-io", "futures-lite 2.3.0", "parking", - "polling 3.7.0", + "polling 3.7.1", "rustix 0.38.34", "slab", "tracing", @@ -391,12 +391,12 @@ dependencies = [ [[package]] name = "async-lock" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" +checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" dependencies = [ - "event-listener 4.0.3", - "event-listener-strategy 0.4.0", + "event-listener 5.3.1", + "event-listener-strategy", "pin-project-lite", ] @@ -439,12 +439,12 @@ dependencies = [ [[package]] name = "async-signal" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afe66191c335039c7bb78f99dc7520b0cbb166b3a1cb33a03f53d8a1c6f2afda" +checksum = "794f185324c2f00e771cd9f1ae8b5ac68be2ca7abb129a87afd6e86d228bc54d" dependencies = [ - "async-io 2.3.2", - "async-lock 3.3.0", + "async-io 2.3.3", + "async-lock 3.4.0", "atomic-waker", "cfg-if", "futures-core", @@ -544,7 +544,7 @@ dependencies = [ "bytes", "http 1.1.0", "http-body 1.0.0", - "http-serde 2.1.0", + "http-serde 2.1.1", "query_map", "serde", "serde_json", @@ -597,9 +597,9 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.71" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" +checksum = 
"17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11" dependencies = [ "addr2line", "cc", @@ -708,12 +708,11 @@ dependencies = [ [[package]] name = "blocking" -version = "1.6.0" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "495f7104e962b7356f0aeb34247aca1fe7d2e783b346582db7f2904cb5717e88" +checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" dependencies = [ "async-channel 2.3.1", - "async-lock 3.3.0", "async-task", "futures-io", "futures-lite 2.3.0", @@ -808,9 +807,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.98" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" +checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695" [[package]] name = "cfg-if" @@ -915,9 +914,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" +checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" [[package]] name = "colorchoice" @@ -1450,12 +1449,9 @@ dependencies = [ [[package]] name = "escape8259" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba4f4911e3666fcd7826997b4745c8224295a6f3072f1418c3067b97a67557ee" -dependencies = [ - "rustversion", -] +checksum = "5692dd7b5a1978a5aeb0ce83b7655c58ca8efdcb79d21036ea249da95afec2c6" [[package]] name = "event-listener" @@ -1476,43 +1472,22 @@ dependencies = [ [[package]] name = "event-listener" -version = "4.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" -dependencies = [ - 
"concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener" -version = "5.3.0" +version = "5.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d9944b8ca13534cdfb2800775f8dd4902ff3fc75a50101466decadfdf322a24" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" dependencies = [ "concurrent-queue", "parking", "pin-project-lite", ] -[[package]] -name = "event-listener-strategy" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" -dependencies = [ - "event-listener 4.0.3", - "pin-project-lite", -] - [[package]] name = "event-listener-strategy" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" dependencies = [ - "event-listener 5.3.0", + "event-listener 5.3.1", "pin-project-lite", ] @@ -1539,8 +1514,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2" dependencies = [ "bit-set", - "regex-automata 0.4.6", - "regex-syntax 0.8.3", + "regex-automata 0.4.7", + "regex-syntax 0.8.4", ] [[package]] @@ -1769,9 +1744,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "glob" @@ -2204,9 +2179,9 @@ dependencies = [ [[package]] name = "http-serde" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1133cafcce27ea69d35e56b3a8772e265633e04de73c5f4e1afdffc1d19b5419" +checksum = 
"0f056c8559e3757392c8d091e796416e4649d8e49e88b8d76df6c002f05027fd" dependencies = [ "http 1.1.0", "serde", @@ -2214,9 +2189,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.8.0" +version = "1.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "9f3935c160d00ac752e09787e6e6bfc26494c2183cc922f1bc678a60d4733bc2" [[package]] name = "httpdate" @@ -2269,7 +2244,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.7", + "socket2 0.4.10", "tokio", "tower-service", "tracing", @@ -2644,15 +2619,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.11" @@ -2706,7 +2672,7 @@ dependencies = [ "petgraph", "pico-args", "regex", - "regex-syntax 0.8.3", + "regex-syntax 0.8.4", "string_cache", "term", "tiny-keccak", @@ -2720,17 +2686,17 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "regex-automata 0.4.6", + "regex-automata 0.4.7", ] [[package]] name = "lambda_http" -version = "0.11.1" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ebde9aedfdf8c2bac4365d9adafae6ca6d631b0ea221734ac802595fcd141cb" +checksum = "e8cce88e904c251a1f4f7a40d8ff05446df5fe3b62105d587a9818608e5d866e" dependencies = [ "aws_lambda_events", - "base64 0.21.7", + "base64 0.22.1", "bytes", "encoding_rs", "futures", @@ -2752,9 +2718,9 @@ dependencies = [ [[package]] name = "lambda_runtime" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ae4606aea513f0e614497c0c4556d0e39f51a8434d9d97e592d32f9e615d4232" +checksum = "9be8f0e7a5db270feb93a7a3593c22a4c5fb8e8f260f5f490e0c3a5ffeb009db" dependencies = [ "async-stream", "base64 0.22.1", @@ -2763,7 +2729,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "http-body-util", - "http-serde 2.1.0", + "http-serde 2.1.1", "hyper 1.3.1", "hyper-util", "lambda_runtime_api_client", @@ -2926,36 +2892,13 @@ dependencies = [ "value-bag", ] -[[package]] -name = "logos" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c000ca4d908ff18ac99b93a062cb8958d331c3220719c52e77cb19cc6ac5d2c1" -dependencies = [ - "logos-derive 0.13.0", -] - [[package]] name = "logos" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "161971eb88a0da7ae0c333e1063467c5b5727e7fb6b710b8db4814eade3a42e8" dependencies = [ - "logos-derive 0.14.0", -] - -[[package]] -name = "logos-codegen" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc487311295e0002e452025d6b580b77bb17286de87b57138f3b5db711cded68" -dependencies = [ - "beef", - "fnv", - "proc-macro2", - "quote", - "regex-syntax 0.6.29", - "syn 2.0.66", + "logos-derive", ] [[package]] @@ -2969,26 +2912,17 @@ dependencies = [ "lazy_static", "proc-macro2", "quote", - "regex-syntax 0.8.3", + "regex-syntax 0.8.4", "syn 2.0.66", ] -[[package]] -name = "logos-derive" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfc0d229f1f42d790440136d941afd806bc9e949e2bcb8faa813b0f00d1267e" -dependencies = [ - "logos-codegen 0.13.0", -] - [[package]] name = "logos-derive" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c2a69b3eb68d5bd595107c9ee58d7e07fe2bb5e360cc85b0f084dedac80de0a" dependencies = [ - "logos-codegen 0.14.0", + "logos-codegen", ] [[package]] @@ -3199,12 +3133,12 @@ version = "0.12.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9e0d88686dc561d743b40de8269b26eaf0dc58781bde087b0984646602021d08" dependencies = [ - "async-lock 3.3.0", + "async-lock 3.4.0", "async-trait", "crossbeam-channel", "crossbeam-epoch", "crossbeam-utils", - "event-listener 5.3.0", + "event-listener 5.3.1", "futures-util", "once_cell", "parking_lot", @@ -3353,9 +3287,9 @@ dependencies = [ [[package]] name = "object" -version = "0.32.2" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +checksum = "b8ec7ab813848ba4522158d5517a6093db1ded27575b070f4177b8d12b41db5e" dependencies = [ "memchr", ] @@ -3606,9 +3540,9 @@ checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" [[package]] name = "parking_lot" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -3716,7 +3650,7 @@ dependencies = [ "bincode", "either", "fnv", - "itertools 0.12.1", + "itertools 0.10.5", "lazy_static", "nom", "quick-xml", @@ -3768,9 +3702,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "piper" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "464db0c665917b13ebb5d453ccdec4add5658ee1adc7affc7677615356a8afaf" +checksum = "ae1d5c74c9876f070d3e8fd503d748c7d974c3e48da8f41350fa5222ef9b4391" dependencies = [ "atomic-waker", "fastrand 2.1.0", @@ -3823,9 +3757,9 @@ dependencies = [ [[package]] name = "polling" -version = "3.7.0" +version = "3.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"645493cf344456ef24219d02a768cf1fb92ddf8c92161679ae3d91b91a637be3" +checksum = "5e6a007746f34ed64099e88783b0ae369eaa3da6392868ba262e2af9b8fbaea1" dependencies = [ "cfg-if", "concurrent-queue", @@ -3959,7 +3893,7 @@ checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes", "heck", - "itertools 0.12.1", + "itertools 0.10.5", "log", "multimap", "once_cell", @@ -3979,7 +3913,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools 0.12.1", + "itertools 0.10.5", "proc-macro2", "quote", "syn 2.0.66", @@ -3992,7 +3926,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f5eec97d5d34bdd17ad2db2219aabf46b054c6c41bd5529767c9ce55be5898f" dependencies = [ "base64 0.22.1", - "logos 0.14.0", + "logos", "miette 7.2.0", "once_cell", "prost", @@ -4119,9 +4053,9 @@ checksum = "9653c3ed92974e34c5a6e0a510864dab979760481714c172e0a34e437cb98804" [[package]] name = "protox" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a29b3c5596eb23a849deba860b53ffd468199d9ad5fe4402a7d55379e16aa2d2" +checksum = "ac532509cee918d40f38c3e12f8ef9230f215f017d54de7dd975015538a42ce7" dependencies = [ "bytes", "miette 7.2.0", @@ -4134,11 +4068,11 @@ dependencies = [ [[package]] name = "protox-parse" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "033b939d76d358f7c32120c86c71f515bae45e64f2bde455200356557276276c" +checksum = "7f6c33f43516fe397e2f930779d720ca12cd057f7da4cd6326a0ef78d69dee96" dependencies = [ - "logos 0.13.0", + "logos", "miette 7.2.0", "prost-types", "thiserror", @@ -4310,8 +4244,8 @@ checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.6", - "regex-syntax 0.8.3", + 
"regex-automata 0.4.7", + "regex-syntax 0.8.4", ] [[package]] @@ -4325,13 +4259,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.3", + "regex-syntax 0.8.4", ] [[package]] @@ -4354,9 +4288,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "relative-path" @@ -4565,9 +4499,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.9" +version = "0.23.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a218f0f6d05669de4eabfb24f31ce802035c952429d037507b4a4a39f0e60c5b" +checksum = "05cff451f60db80f490f3c182b77c35260baace73209e9cdbbe526bfe3a4d402" dependencies = [ "once_cell", "rustls-pki-types", @@ -4667,9 +4601,9 @@ dependencies = [ [[package]] name = "schemars" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0218ceea14babe24a4a5836f86ade86c1effbc198164e619194cb5069187e29" +checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" dependencies = [ "dyn-clone", "schemars_derive", @@ -4679,9 +4613,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed5a1ccce8ff962e31a165d41f6e2a2dd1245099dc4d594f5574a86cd90f4d3" +checksum = 
"b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e" dependencies = [ "proc-macro2", "quote", @@ -5314,7 +5248,7 @@ dependencies = [ "reqwest-middleware", "resource", "rquickjs", - "rustls 0.23.9", + "rustls 0.23.10", "rustls-pemfile 1.0.4", "rustls-pki-types", "schemars", @@ -6055,9 +5989,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859eb650cfee7434994602c3a68b25d77ad9e68c8a6cd491616ef86661382eb3" +checksum = "1b2cb4fbb9995eeb36ac86fadf24031ccd58f99d6b4b2d7b911db70bddb80d90" [[package]] name = "try-lock" @@ -6124,9 +6058,9 @@ checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "unicode-width" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" [[package]] name = "unicode-xid" @@ -6191,9 +6125,9 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "utf8parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" @@ -6529,9 +6463,9 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "749f0da9cc72d82e600d8d2e44cadd0b9eedb9038f71a1c58556ac1c5791813b" +checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" dependencies = [ "windows-targets 0.52.5", ] @@ -6901,9 +6835,9 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.7.0" +version = "1.8.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" [[package]] name = "zerovec" diff --git a/Cargo.toml b/Cargo.toml index 8c58baecdb..debb7f61a3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -104,7 +104,6 @@ update-informer = { version = "1.1.0", default-features = false, features = [ lazy_static = { workspace = true } which = { version = "6.0.1", optional = true } async-recursion = "1.1.1" -tempfile = "3.10.1" rquickjs = { "version" = "0.5.1", optional = true, features = ["macro"] } strum_macros = "0.26.4" # TODO: disable some levels with features? diff --git a/README.md b/README.md index 7d898c9686..d180dda790 100644 --- a/README.md +++ b/README.md @@ -99,7 +99,7 @@ Head out to [docs] to learn about other powerful tailcall features. Your contributions are invaluable! Kindly go through our [contribution guidelines] if you are a first time contributor. 
-[contribution guidelines]: https://tailcall.run/developers/ +[contribution guidelines]: https://tailcall.run/docs/contribution-guidelines ### Support Us diff --git a/benches/impl_path_string_for_evaluation_context.rs b/benches/impl_path_string_for_evaluation_context.rs index 552c6f469d..b4356d8df8 100644 --- a/benches/impl_path_string_for_evaluation_context.rs +++ b/benches/impl_path_string_for_evaluation_context.rs @@ -175,20 +175,24 @@ fn to_bench_id(input: &[&str]) -> BenchmarkId { #[derive(Clone)] struct MockGraphqlContext; -impl<'a> ResolverContextLike<'a> for MockGraphqlContext { - fn value(&'a self) -> Option<&'a Value> { +impl ResolverContextLike for MockGraphqlContext { + fn value(&self) -> Option<&Value> { Some(&TEST_VALUES) } - fn args(&'a self) -> Option<&'a IndexMap> { + fn args(&self) -> Option<&IndexMap> { Some(&TEST_ARGS) } - fn field(&'a self) -> Option { + fn field(&self) -> Option { None } - fn add_error(&'a self, _: async_graphql::ServerError) {} + fn is_query(&self) -> bool { + false + } + + fn add_error(&self, _: async_graphql::ServerError) {} } // assert that everything was set up correctly for the benchmark diff --git a/generated/.tailcallrc.graphql b/generated/.tailcallrc.graphql index 44ab0a5a90..0d76bcf8ff 100644 --- a/generated/.tailcallrc.graphql +++ b/generated/.tailcallrc.graphql @@ -13,6 +13,13 @@ directive @addField( path: [String!] ) repeatable on OBJECT +""" +The @alias directive indicates that aliases of one enum value. +""" +directive @alias( + options: [String!] +) on ENUM_VALUE_DEFINITION + """ The @cache operator enables caching for the query, field or type it is applied to. """ @@ -237,6 +244,13 @@ directive @server( """ batchRequests: Boolean """ + Enables deduplication of IO operations to enhance performance.This flag prevents + duplicate IO requests from being executed concurrently, reducing resource load. Caution: + May lead to issues with APIs that expect unique results for identical inputs, such + as nonce-based APIs. 
+ """ + dedupe: Boolean + """ `globalResponseTimeout` sets the maximum query duration before termination, acting as a safeguard against long-running queries. """ @@ -355,11 +369,6 @@ directive @upstream( """ connectTimeout: Int """ - When set to `true`, it will ensure no HTTP, GRPC, or any other IO call is made more - than once within the context of a single GraphQL request. - """ - dedupe: Boolean - """ The `http2Only` setting allows you to specify whether the client should always issue HTTP2 requests, without checking if the server supports it or not. By default it is set to `false` for all HTTP requests made by the server, but is automatically diff --git a/generated/.tailcallrc.schema.json b/generated/.tailcallrc.schema.json index 2757a90846..78e4fa03b0 100644 --- a/generated/.tailcallrc.schema.json +++ b/generated/.tailcallrc.schema.json @@ -93,6 +93,22 @@ }, "additionalProperties": false }, + "Alias": { + "description": "The @alias directive indicates that aliases of one enum value.", + "type": "object", + "required": [ + "options" + ], + "properties": { + "options": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + } + }, "Apollo": { "type": "object", "required": [ @@ -361,7 +377,7 @@ "variants": { "type": "array", "items": { - "type": "string" + "$ref": "#/definitions/Variant" }, "uniqueItems": true } @@ -411,6 +427,9 @@ } ] }, + "default_value": { + "description": "Stores the default value for the field" + }, "doc": { "description": "Publicly visible documentation for the field.", "type": [ @@ -494,6 +513,7 @@ }, "protected": { "description": "Marks field as protected by auth provider", + "default": null, "anyOf": [ { "$ref": "#/definitions/Protected" @@ -1023,6 +1043,13 @@ "null" ] }, + "dedupe": { + "description": "Enables deduplication of IO operations to enhance performance.\n\nThis flag prevents duplicate IO requests from being executed concurrently, reducing resource load. 
Caution: May lead to issues with APIs that expect unique results for identical inputs, such as nonce-based APIs.", + "type": [ + "boolean", + "null" + ] + }, "globalResponseTimeout": { "description": "`globalResponseTimeout` sets the maximum query duration before termination, acting as a safeguard against long-running queries.", "type": [ @@ -1308,6 +1335,7 @@ }, "protected": { "description": "Marks field as protected by auth providers", + "default": null, "anyOf": [ { "$ref": "#/definitions/Protected" @@ -1428,13 +1456,6 @@ "format": "uint64", "minimum": 0.0 }, - "dedupe": { - "description": "When set to `true`, it will ensure no HTTP, GRPC, or any other IO call is made more than once within the context of a single GraphQL request.", - "type": [ - "boolean", - "null" - ] - }, "http2Only": { "description": "The `http2Only` setting allows you to specify whether the client should always issue HTTP2 requests, without checking if the server supports it or not. By default it is set to `false` for all HTTP requests made by the server, but is automatically set to true for GRPC.", "type": [ @@ -1553,6 +1574,28 @@ } } }, + "Variant": { + "description": "Definition of GraphQL value", + "type": "object", + "required": [ + "name" + ], + "properties": { + "alias": { + "anyOf": [ + { + "$ref": "#/definitions/Alias" + }, + { + "type": "null" + } + ] + }, + "name": { + "type": "string" + } + } + }, "schema": { "oneOf": [ { diff --git a/npm/gen-root.ts b/npm/gen-root.ts index 1c612b71a4..8b64bb2c19 100644 --- a/npm/gen-root.ts +++ b/npm/gen-root.ts @@ -9,10 +9,12 @@ const __dirname = dirname(fileURLToPath(import.meta.url)) interface ICLI { version: string + name: string } const options = parse({ version: {alias: "v", type: String}, + name: {alias: "n", type: String}, }) async function getBuildDefinitions(): Promise { @@ -23,6 +25,7 @@ async function getBuildDefinitions(): Promise { async function genServerPackage(buildDefinitions: string[]) { const packageVersion = options.version 
|| "0.1.0" + const name = options.name || "@tailcallhq/tailcall" console.log(`Generating package.json with version ${packageVersion}`) @@ -43,7 +46,7 @@ async function genServerPackage(buildDefinitions: string[]) { repository: repository!, homepage: homepage!, keywords: keywords!, - name: "@tailcallhq/tailcall", + name: name, type: "module", version: packageVersion, optionalDependencies, diff --git a/npm/package-lock.json b/npm/package-lock.json index 4081a8c55d..d2af0e22ee 100644 --- a/npm/package-lock.json +++ b/npm/package-lock.json @@ -825,9 +825,9 @@ } }, "node_modules/tsx": { - "version": "4.15.2", - "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.15.2.tgz", - "integrity": "sha512-kIZTOCmR37nEw0qxQks2dR+eZWSXydhTGmz7yx94vEiJtJGBTkUl0D/jt/5fey+CNdm6i3Cp+29WKRay9ScQUw==", + "version": "4.15.5", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.15.5.tgz", + "integrity": "sha512-iKi8jQ2VBmZ2kU/FkGkL2OSHBHsazsUzsdC/W/RwhKIEsIoZ1alCclZHP5jGfNHEaEWUJFM1GquzCf+4db3b0w==", "dev": true, "license": "MIT", "dependencies": { diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 2c1a03c1ae..39235a1148 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.78" +channel = "1.79" profile = "default" diff --git a/src/cli/runtime/file.rs b/src/cli/runtime/file.rs index 92157915ce..4bcd064938 100644 --- a/src/cli/runtime/file.rs +++ b/src/cli/runtime/file.rs @@ -51,37 +51,8 @@ impl FileIO for NativeFileIO { #[cfg(test)] mod tests { - use tempfile::NamedTempFile; - use super::*; - #[tokio::test] - async fn test_write_and_read_file() { - // Setup - Create a temporary file - let tmp_file = NamedTempFile::new().expect("Failed to create temp file"); - let tmp_path = tmp_file - .path() - .to_str() - .expect("Failed to get temp file path"); - let file_io = NativeFileIO::init(); - - // Test writing to the file - let content = b"Hello, world!"; - file_io - .write(tmp_path, content) - .await - .expect("Failed to write to temp 
file"); - - // Test reading from the file - let read_content = file_io - .read(tmp_path) - .await - .expect("Failed to read from temp file"); - - // Verify the content is as expected - assert_eq!(read_content, String::from_utf8_lossy(content)); - } - #[tokio::test] async fn test_write_error() { // Attempt to write to an invalid path diff --git a/src/core/app_context.rs b/src/core/app_context.rs index 85b394099a..c85947dd91 100644 --- a/src/core/app_context.rs +++ b/src/core/app_context.rs @@ -1,17 +1,19 @@ use std::sync::Arc; use async_graphql::dynamic::{self, DynamicRequest}; -use async_graphql::Response; +use async_graphql_value::ConstValue; +use hyper::body::Bytes; +use crate::core::async_graphql_hyper::OperationId; use crate::core::auth::context::GlobalAuthContext; use crate::core::blueprint::Type::ListType; use crate::core::blueprint::{Blueprint, Definition, SchemaModifiers}; -use crate::core::data_loader::DataLoader; +use crate::core::data_loader::{DataLoader, DedupeResult}; use crate::core::graphql::GraphqlDataLoader; use crate::core::grpc; use crate::core::grpc::data_loader::GrpcDataLoader; -use crate::core::http::{DataLoaderRequest, HttpDataLoader}; -use crate::core::ir::{DataLoaderId, IO, IR}; +use crate::core::http::{DataLoaderRequest, HttpDataLoader, Response}; +use crate::core::ir::{DataLoaderId, Error, IoId, IO, IR}; use crate::core::rest::{Checked, EndpointSet}; use crate::core::runtime::TargetRuntime; @@ -24,6 +26,8 @@ pub struct AppContext { pub grpc_data_loaders: Arc>>, pub endpoints: EndpointSet, pub auth_ctx: Arc, + pub dedupe_handler: Arc>, + pub dedupe_operation_handler: DedupeResult, Error>, } impl AppContext { @@ -128,10 +132,12 @@ impl AppContext { grpc_data_loaders: Arc::new(grpc_data_loaders), endpoints, auth_ctx: Arc::new(auth_ctx), + dedupe_handler: Arc::new(DedupeResult::new(false)), + dedupe_operation_handler: DedupeResult::new(false), } } - pub async fn execute(&self, request: impl Into) -> Response { + pub async fn execute(&self, 
request: impl Into) -> async_graphql::Response { self.schema.execute(request).await } } diff --git a/src/core/async_cache.rs b/src/core/async_cache.rs deleted file mode 100644 index 042fdb05f4..0000000000 --- a/src/core/async_cache.rs +++ /dev/null @@ -1,178 +0,0 @@ -use std::collections::HashMap; -use std::fmt::Debug; -use std::hash::Hash; -use std::pin::Pin; -use std::sync::{Arc, RwLock}; - -use futures_util::Future; -use tokio::sync::broadcast::Sender; - -/// A simple async cache that uses a `HashMap` to store the values. -pub struct AsyncCache { - cache: Arc>>>, -} - -#[derive(Clone)] -pub enum CacheValue { - Pending(Sender>>), - Ready(Arc>), -} - -impl - Default for AsyncCache -{ - fn default() -> Self { - Self::new() - } -} - -impl - AsyncCache -{ - pub fn new() -> Self { - Self { cache: Arc::new(RwLock::new(HashMap::new())) } - } - - fn get_cache_value(&self, key: &Key) -> Option> { - self.cache.read().unwrap().get(key).cloned() - } - - pub async fn get_or_eval<'a>( - &self, - key: Key, - or_else: impl FnOnce() -> Pin> + 'a + Send>> + Send, - ) -> Arc> { - if let Some(cache_value) = self.get_cache_value(&key) { - match cache_value { - CacheValue::Pending(tx) => tx.subscribe().recv().await.unwrap(), - CacheValue::Ready(value) => value, - } - } else { - let (tx, _) = tokio::sync::broadcast::channel(100); - self.cache - .write() - .unwrap() - .insert(key.clone(), CacheValue::Pending(tx.clone())); - let result = Arc::new(or_else().await); - let mut guard = self.cache.write().unwrap(); - if let Some(cache_value) = guard.get_mut(&key) { - *cache_value = CacheValue::Ready(result.clone()) - } - tx.send(result.clone()).ok(); - result - } - } -} - -#[cfg(test)] -mod tests { - use pretty_assertions::assert_eq; - - use super::*; - - #[tokio::test] - async fn test_no_key() { - let cache = AsyncCache::::new(); - let actual = cache - .get_or_eval(1, || Box::pin(async { Ok(1) })) - .await - .as_ref() - .clone() - .unwrap(); - assert_eq!(actual, 1); - } - - #[tokio::test] - 
async fn test_with_key() { - let cache = AsyncCache::::new(); - cache - .get_or_eval(1, || Box::pin(async { Ok(1) })) - .await - .as_ref() - .clone() - .unwrap(); - - let actual = cache - .get_or_eval(1, || Box::pin(async { Ok(2) })) - .await - .as_ref() - .clone() - .unwrap(); - assert_eq!(actual, 1); - } - - #[tokio::test] - async fn test_with_multi_get() { - let cache = AsyncCache::::new(); - - for i in 0..100 { - cache - .get_or_eval(1, || Box::pin(async move { Ok(i) })) - .await - .as_ref() - .clone() - .unwrap(); - } - - let actual = cache - .get_or_eval(1, || Box::pin(async { Ok(2) })) - .await - .as_ref() - .clone() - .unwrap(); - assert_eq!(actual, 0); - } - - #[tokio::test] - async fn test_with_failure() { - let cache = AsyncCache::::new(); - let actual = cache - .get_or_eval(1, || Box::pin(async { Err("error".into()) })) - .await; - assert!(actual.is_err()); - } - - #[tokio::test] - async fn test_with_multi_get_failure() { - let cache = AsyncCache::::new(); - let _ = cache - .get_or_eval(1, || Box::pin(async { Err("error".into()) })) - .await; - - let actual = cache.get_or_eval(1, || Box::pin(async { Ok(2) })).await; - - assert!(actual.is_err()); - } - - #[tokio::test] - async fn test_concurrent_access() { - let cache = Arc::new(AsyncCache::::new()); - let key = 1; - let value = 42; - // Simulate concurrent access by spawning multiple tasks. - let handles: Vec<_> = (0..100) - .map(|_| { - let cache = cache.clone(); - tokio::spawn(async move { - cache - .get_or_eval(key, || Box::pin(async { Ok(value) })) - .await - }) - }) - .collect(); - - // Await all spawned tasks and collect their results. - let results: Vec<_> = futures_util::future::join_all(handles) - .await - .into_iter() - .map(|res| res.unwrap().as_ref().clone().unwrap()) // Unwrap the Result from the join, and the Result from get_or_eval - .collect(); - - // Check that all tasks received the correct value. 
- assert!(results.iter().all(|&v| v == value)); - - // Optionally, verify that the value was computed only once. - // This might require additional instrumentation in the cache or the - // computation function. - } -} diff --git a/src/core/async_graphql_hyper.rs b/src/core/async_graphql_hyper.rs index b3d792961c..7602dbc401 100644 --- a/src/core/async_graphql_hyper.rs +++ b/src/core/async_graphql_hyper.rs @@ -1,28 +1,71 @@ use std::any::Any; +use std::hash::{Hash, Hasher}; -use async_graphql::parser::types::ExecutableDocument; +use async_graphql::parser::types::{ExecutableDocument, OperationType}; use async_graphql::{BatchResponse, Executor, Value}; +use headers::HeaderMap; use hyper::header::{HeaderValue, CACHE_CONTROL, CONTENT_TYPE}; use hyper::{Body, Response, StatusCode}; use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; +use tailcall_hasher::TailcallHasher; + +#[derive(PartialEq, Eq, Clone, Hash, Debug)] +pub struct OperationId(u64); use crate::core::error::Error; #[async_trait::async_trait] -pub trait GraphQLRequestLike { +pub trait GraphQLRequestLike: Hash + Send { fn data(self, data: D) -> Self; async fn execute(self, executor: &E) -> GraphQLResponse where E: Executor; fn parse_query(&mut self) -> Option<&ExecutableDocument>; + + fn is_query(&mut self) -> bool { + self.parse_query() + .map(|a| { + let mut is_query = false; + for (_, operation) in a.operations.iter() { + is_query = operation.node.ty == OperationType::Query; + } + is_query + }) + .unwrap_or(false) + } + + fn operation_id(&self, headers: &HeaderMap) -> OperationId { + let mut hasher = TailcallHasher::default(); + let state = &mut hasher; + for (name, value) in headers.iter() { + name.hash(state); + value.hash(state); + } + self.hash(state); + OperationId(hasher.finish()) + } } #[derive(Debug, Deserialize)] pub struct GraphQLBatchRequest(pub async_graphql::BatchRequest); impl GraphQLBatchRequest {} - +impl Hash for GraphQLBatchRequest { + //TODO: Fix Hash implementation for 
BatchRequest, which should ideally batch + // execution of individual requests instead of the whole chunk of requests as + // one. + fn hash(&self, state: &mut H) { + for request in self.0.iter() { + request.query.hash(state); + request.operation_name.hash(state); + for (name, value) in request.variables.iter() { + name.hash(state); + value.to_string().hash(state); + } + } + } +} #[async_trait::async_trait] impl GraphQLRequestLike for GraphQLBatchRequest { fn data(mut self, data: D) -> Self { @@ -48,7 +91,16 @@ impl GraphQLRequestLike for GraphQLBatchRequest { pub struct GraphQLRequest(pub async_graphql::Request); impl GraphQLRequest {} - +impl Hash for GraphQLRequest { + fn hash(&self, state: &mut H) { + self.0.query.hash(state); + self.0.operation_name.hash(state); + for (name, value) in self.0.variables.iter() { + name.hash(state); + value.to_string().hash(state); + } + } +} #[async_trait::async_trait] impl GraphQLRequestLike for GraphQLRequest { #[must_use] diff --git a/src/core/blueprint/blueprint.rs b/src/core/blueprint/blueprint.rs index 503a4eda86..a0776515d7 100644 --- a/src/core/blueprint/blueprint.rs +++ b/src/core/blueprint/blueprint.rs @@ -40,6 +40,7 @@ impl Default for Type { } impl Type { + /// gets the name of the type pub fn name(&self) -> &str { match self { Type::NamedType { name, .. 
} => name, @@ -69,6 +70,7 @@ pub enum Definition { Union(UnionTypeDefinition), } impl Definition { + /// gets the name of the definition pub fn name(&self) -> &str { match self { Definition::Interface(def) => &def.name, @@ -141,6 +143,7 @@ pub struct FieldDefinition { pub resolver: Option, pub directives: Vec, pub description: Option, + pub default_value: Option, } impl FieldDefinition { diff --git a/src/core/blueprint/definitions.rs b/src/core/blueprint/definitions.rs index 50a0897f04..f056159559 100644 --- a/src/core/blueprint/definitions.rs +++ b/src/core/blueprint/definitions.rs @@ -41,7 +41,7 @@ pub fn to_input_object_type_definition( .map(|field| InputFieldDefinition { name: field.name.clone(), description: field.description.clone(), - default_value: None, + default_value: field.default_value.clone(), of_type: field.of_type.clone(), }) .collect(), @@ -236,8 +236,8 @@ fn to_enum_type_definition((name, eu): (&String, &Enum)) -> Definition { .iter() .map(|variant| EnumValueDefinition { description: None, - name: variant.clone(), - directives: Vec::new(), + name: variant.name.clone(), + directives: vec![], }) .collect(), }) @@ -279,6 +279,7 @@ fn update_args<'a>( of_type: to_type(*field, None), directives: Vec::new(), resolver: None, + default_value: field.default_value.clone(), }) }, ) @@ -510,6 +511,7 @@ pub fn to_field_definition( .and(fix_dangling_resolvers()) .and(update_cache_resolvers()) .and(update_protected(object_name).trace(Protected::trace_name().as_str())) + .and(update_enum_alias()) .try_fold( &(config_module, field, type_of, name), FieldDefinition::default(), diff --git a/src/core/blueprint/from_config.rs b/src/core/blueprint/from_config.rs index de9a15b020..2207d550dd 100644 --- a/src/core/blueprint/from_config.rs +++ b/src/core/blueprint/from_config.rs @@ -1,4 +1,4 @@ -use std::collections::{BTreeMap, HashMap}; +use std::collections::{BTreeMap, BTreeSet, HashMap}; use async_graphql::dynamic::SchemaBuilder; @@ -94,7 +94,13 @@ where } 
JsonSchema::Obj(schema_fields) } else if let Some(type_enum_) = type_enum_ { - JsonSchema::Enum(type_enum_.variants.to_owned()) + JsonSchema::Enum( + type_enum_ + .variants + .iter() + .map(|variant| variant.name.clone()) + .collect::>(), + ) } else { match type_of { "String" => JsonSchema::Str, diff --git a/src/core/blueprint/into_schema.rs b/src/core/blueprint/into_schema.rs index 4c37d016fa..126c408faa 100644 --- a/src/core/blueprint/into_schema.rs +++ b/src/core/blueprint/into_schema.rs @@ -34,6 +34,29 @@ fn to_type_ref(type_of: &Type) -> dynamic::TypeRef { } } +/// We set the default value for an `InputValue` by reading it from the +/// blueprint and assigning it to the provided `InputValue` during the +/// generation of the `async_graphql::Schema`. The `InputValue` represents the +/// structure of arguments and their types that can be passed to a field. In +/// other GraphQL implementations, this is commonly referred to as +/// `InputValueDefinition`. +fn set_default_value( + input_value: dynamic::InputValue, + value: Option, +) -> dynamic::InputValue { + if let Some(value) = value { + match ConstValue::from_json(value) { + Ok(const_value) => input_value.default_value(const_value), + Err(err) => { + tracing::warn!("conversion from serde_json::Value to ConstValue failed for default_value with error {err:?}"); + input_value + } + } + } else { + input_value + } +} + fn to_type(def: &Definition) -> dynamic::Type { match def { Definition::Object(def) => { @@ -46,6 +69,11 @@ fn to_type(def: &Definition) -> dynamic::Type { field_name, type_ref.clone(), move |ctx| { + // region: HOT CODE + // -------------------------------------------------- + // HOT CODE STARTS HERE + // -------------------------------------------------- + let req_ctx = ctx.ctx.data::>().unwrap(); let field_name = &field.name; @@ -66,10 +94,12 @@ fn to_type(def: &Definition) -> dynamic::Type { FieldFuture::new( async move { let ctx: ResolverContext = ctx.into(); - let ctx = 
EvaluationContext::new(req_ctx, &ctx); + let mut ctx = EvaluationContext::new(req_ctx, &ctx); - let const_value = - expr.eval(ctx).await.map_err(|err| err.extend())?; + let const_value = expr + .eval(&mut ctx) + .await + .map_err(|err| err.extend())?; let p = match const_value { ConstValue::List(a) => Some(FieldValue::list(a)), ConstValue::Null => FieldValue::NONE, @@ -82,15 +112,20 @@ fn to_type(def: &Definition) -> dynamic::Type { ) } } + + // -------------------------------------------------- + // HOT CODE ENDS HERE + // -------------------------------------------------- + // endregion: hot_code }, ); if let Some(description) = &field.description { dyn_schema_field = dyn_schema_field.description(description); } for arg in field.args.iter() { - dyn_schema_field = dyn_schema_field.argument(dynamic::InputValue::new( - arg.name.clone(), - to_type_ref(&arg.of_type), + dyn_schema_field = dyn_schema_field.argument(set_default_value( + dynamic::InputValue::new(arg.name.clone(), to_type_ref(&arg.of_type)), + arg.default_value.clone(), )); } object = object.field(dyn_schema_field); @@ -123,6 +158,7 @@ fn to_type(def: &Definition) -> dynamic::Type { if let Some(description) = &field.description { input_field = input_field.description(description); } + let input_field = set_default_value(input_field, field.default_value.clone()); input_object = input_object.field(input_field); } if let Some(description) = &def.description { diff --git a/src/core/blueprint/operators/enum_alias.rs b/src/core/blueprint/operators/enum_alias.rs new file mode 100644 index 0000000000..b358a395ca --- /dev/null +++ b/src/core/blueprint/operators/enum_alias.rs @@ -0,0 +1,37 @@ +use std::collections::HashMap; + +use crate::core::blueprint::*; +use crate::core::config; +use crate::core::config::Field; +use crate::core::ir::{Map, IR}; +use crate::core::try_fold::TryFold; +use crate::core::valid::Valid; + +pub fn update_enum_alias<'a>( +) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a 
str), FieldDefinition, String> +{ + TryFold::<(&ConfigModule, &Field, &config::Type, &'a str), FieldDefinition, String>::new( + |(config, field, _, _), mut b_field| { + let enum_type = config.enums.get(&field.type_of); + if let Some(enum_type) = enum_type { + let has_alias = enum_type.variants.iter().any(|v| v.alias.is_some()); + if !has_alias { + return Valid::succeed(b_field); + } + let mut map = HashMap::::new(); + for v in enum_type.variants.iter() { + map.insert(v.name.clone(), v.name.clone()); + if let Some(alias) = &v.alias { + for option in &alias.options { + map.insert(option.to_owned(), v.name.clone()); + } + } + } + b_field.resolver = b_field + .resolver + .map(|r| IR::Map(Map { input: Box::new(r), map })); + } + Valid::succeed(b_field) + }, + ) +} diff --git a/src/core/blueprint/operators/http.rs b/src/core/blueprint/operators/http.rs index 48a18d3a1a..6e509a1403 100644 --- a/src/core/blueprint/operators/http.rs +++ b/src/core/blueprint/operators/http.rs @@ -10,7 +10,6 @@ use crate::core::{config, helpers}; pub fn compile_http( config_module: &config::ConfigModule, - field: &config::Field, http: &config::Http, ) -> Valid { Valid::<(), String>::fail("GroupBy is only supported for GET requests".to_string()) @@ -42,15 +41,11 @@ pub fn compile_http( .iter() .map(|key_value| (key_value.key.clone(), key_value.value.clone())) .collect(); - let output_schema = to_json_schema_for_field(field, config_module); - let input_schema = to_json_schema_for_args(&field.args, config_module); RequestTemplate::try_from( Endpoint::new(base_url.to_string()) .method(http.method.clone()) .query(query) - .output(output_schema) - .input(input_schema) .body(http.body.clone()) .encoding(http.encoding.clone()), ) @@ -88,7 +83,7 @@ pub fn update_http<'a>( return Valid::succeed(b_field); }; - compile_http(config_module, field, http) + compile_http(config_module, http) .map(|resolver| b_field.resolver(Some(resolver))) .and_then(|b_field| { b_field diff --git 
a/src/core/blueprint/operators/mod.rs b/src/core/blueprint/operators/mod.rs index 166d71f219..7ca9bf0e1a 100644 --- a/src/core/blueprint/operators/mod.rs +++ b/src/core/blueprint/operators/mod.rs @@ -1,4 +1,5 @@ mod call; +mod enum_alias; mod expr; mod graphql; mod grpc; @@ -8,6 +9,7 @@ mod modify; mod protected; pub use call::*; +pub use enum_alias::*; pub use expr::*; pub use graphql::*; pub use grpc::*; diff --git a/src/core/blueprint/server.rs b/src/core/blueprint/server.rs index b6c802911e..25cf5a21e5 100644 --- a/src/core/blueprint/server.rs +++ b/src/core/blueprint/server.rs @@ -36,6 +36,7 @@ pub struct Server { pub cors: Option, pub experimental_headers: HashSet, pub auth: Option, + pub dedupe: bool, } /// Mimic of mini_v8::Script that's wasm compatible @@ -150,6 +151,7 @@ impl TryFrom for Server { script, cors, auth, + dedupe: config_server.get_dedupe(), } }, ) diff --git a/src/core/blueprint/upstream.rs b/src/core/blueprint/upstream.rs index 0fc2bd5cc6..75a90f5ceb 100644 --- a/src/core/blueprint/upstream.rs +++ b/src/core/blueprint/upstream.rs @@ -27,7 +27,6 @@ pub struct Upstream { pub http_cache: u64, pub batch: Option, pub http2_only: bool, - pub dedupe: bool, pub on_request: Option, } @@ -82,7 +81,6 @@ impl TryFrom<&ConfigModule> for Upstream { http_cache: (config_upstream).get_http_cache_size(), batch, http2_only: (config_upstream).get_http_2_only(), - dedupe: (config_upstream).get_dedupe(), on_request: (config_upstream).get_on_request(), }) .to_result() diff --git a/src/core/config/config.rs b/src/core/config/config.rs index 4585cb6116..4aba6c29c3 100644 --- a/src/core/config/config.rs +++ b/src/core/config/config.rs @@ -262,10 +262,16 @@ pub struct Field { /// /// Sets the cache configuration for a field pub cache: Option, + /// /// Marks field as protected by auth provider #[serde(default)] pub protected: Option, + + /// + /// Stores the default value for the field + #[serde(default, skip_serializing_if = "is_default")] + pub default_value: 
Option, } // It's a terminal implementation of MergeRight @@ -397,10 +403,46 @@ pub struct Union { #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, schemars::JsonSchema, MergeRight)] /// Definition of GraphQL enum type pub struct Enum { - pub variants: BTreeSet, + pub variants: BTreeSet, pub doc: Option, } +/// Definition of GraphQL value +#[derive( + Serialize, + Deserialize, + Clone, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + schemars::JsonSchema, + MergeRight, +)] +pub struct Variant { + pub name: String, + // directive: alias + pub alias: Option, +} + +/// The @alias directive indicates that aliases of one enum value. +#[derive( + Serialize, + Deserialize, + Clone, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + schemars::JsonSchema, + MergeRight, +)] +pub struct Alias { + pub options: BTreeSet, +} + #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq, schemars::JsonSchema)] #[serde(deny_unknown_fields)] /// The @http operator indicates that a field or node is backed by a REST API. 
diff --git a/src/core/config/from_document.rs b/src/core/config/from_document.rs index 01896659d2..576a0a7647 100644 --- a/src/core/config/from_document.rs +++ b/src/core/config/from_document.rs @@ -1,4 +1,4 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; use async_graphql::parser::types::{ BaseType, ConstDirective, EnumType, FieldDefinition, InputObjectType, InputValueDefinition, @@ -7,15 +7,16 @@ use async_graphql::parser::types::{ }; use async_graphql::parser::Positioned; use async_graphql::Name; +use async_graphql_value::ConstValue; use super::telemetry::Telemetry; -use super::{Tag, JS}; +use super::{Alias, Tag, JS}; use crate::core::config::{ self, Cache, Call, Config, Enum, GraphQL, Grpc, Link, Modify, Omit, Protected, RootSchema, - Server, Union, Upstream, + Server, Union, Upstream, Variant, }; use crate::core::directive::DirectiveCodec; -use crate::core::valid::{Valid, Validator}; +use crate::core::valid::{Valid, ValidationError, Validator}; const DEFAULT_SCHEMA_DEFINITION: &SchemaDefinition = &SchemaDefinition { extend: false, @@ -208,26 +209,22 @@ fn to_union_types( fn to_enum_types( type_definitions: &[&Positioned], ) -> Valid, String> { - Valid::succeed( - type_definitions - .iter() - .filter_map(|type_definition| { - let type_name = pos_name_to_string(&type_definition.node.name); - let type_opt = match type_definition.node.kind.clone() { - TypeKind::Enum(enum_type) => to_enum( - enum_type, - type_definition - .node - .description - .to_owned() - .map(|pos| pos.node), - ), - _ => return None, - }; - Some((type_name, type_opt)) - }) - .collect(), - ) + Valid::from_iter(type_definitions.iter(), |type_definition| { + let type_name = pos_name_to_string(&type_definition.node.name); + let type_opt = match type_definition.node.kind.clone() { + TypeKind::Enum(enum_type) => to_enum( + enum_type, + type_definition + .node + .description + .to_owned() + .map(|pos| pos.node), + ), + _ => return Valid::succeed(None), + }; + 
type_opt.map(|type_opt| Some((type_name, type_opt))) + }) + .map(|values| values.into_iter().flatten().collect()) } #[allow(clippy::too_many_arguments)] @@ -291,23 +288,36 @@ fn to_input_object_fields( to_fields_inner(input_object_fields, to_input_object_field) } fn to_field(field_definition: &FieldDefinition) -> Valid { - to_common_field(field_definition, to_args(field_definition)) + to_common_field(field_definition, to_args(field_definition), None) } fn to_input_object_field(field_definition: &InputValueDefinition) -> Valid { - to_common_field(field_definition, BTreeMap::new()) + to_common_field( + field_definition, + BTreeMap::new(), + field_definition + .default_value + .as_ref() + .map(|f| f.node.clone()), + ) } fn to_common_field( field: &F, args: BTreeMap, + default_value: Option, ) -> Valid where - F: Fieldlike, + F: FieldLike, { let type_of = field.type_of(); let base = &type_of.base; let nullable = &type_of.nullable; let description = field.description(); let directives = field.directives(); + let default_value = default_value + .map(ConstValue::into_json) + .transpose() + .map_err(|err| ValidationError::new(err.to_string())) + .into(); let type_of = to_type_of(type_of); let list = matches!(&base, BaseType::List(_)); @@ -322,8 +332,9 @@ where .fuse(JS::from_directives(directives.iter())) .fuse(Call::from_directives(directives.iter())) .fuse(Protected::from_directives(directives.iter())) + .fuse(default_value) .map( - |(http, graphql, cache, grpc, omit, modify, script, call, protected)| { + |(http, graphql, cache, grpc, omit, modify, script, call, protected, default_value)| { let const_field = to_const_field(directives); config::Field { type_of, @@ -342,6 +353,7 @@ where cache, call, protected, + default_value, } }, ) @@ -394,13 +406,21 @@ fn to_union(union_type: UnionType, doc: &Option) -> Union { Union { types, doc: doc.clone() } } -fn to_enum(enum_type: EnumType, doc: Option) -> Enum { - let variants = enum_type - .values - .iter() - .map(|member| 
member.node.value.node.as_str().to_owned()) - .collect(); - Enum { variants, doc } +fn to_enum(enum_type: EnumType, doc: Option) -> Valid { + let variants = Valid::from_iter(enum_type.values.iter(), |member| { + let name = member.node.value.node.as_str().to_owned(); + let alias = member + .node + .directives + .iter() + .find(|d| d.node.name.node.as_str() == Alias::directive_name()); + if let Some(alias) = alias { + Alias::from_directive(&alias.node).map(|alias| Variant { name, alias: Some(alias) }) + } else { + Valid::succeed(Variant { name, alias: None }) + } + }); + variants.map(|v| Enum { variants: v.into_iter().collect::>(), doc }) } fn to_const_field(directives: &[Positioned]) -> Option { directives.iter().find_map(|directive| { @@ -445,12 +465,12 @@ impl HasName for InputValueDefinition { } } -trait Fieldlike { +trait FieldLike { fn type_of(&self) -> &Type; fn description(&self) -> &Option>; fn directives(&self) -> &[Positioned]; } -impl Fieldlike for FieldDefinition { +impl FieldLike for FieldDefinition { fn type_of(&self) -> &Type { &self.ty.node } @@ -461,7 +481,7 @@ impl Fieldlike for FieldDefinition { &self.directives } } -impl Fieldlike for InputValueDefinition { +impl FieldLike for InputValueDefinition { fn type_of(&self) -> &Type { &self.ty.node } diff --git a/src/core/config/into_document.rs b/src/core/config/into_document.rs index c9f3223007..bd2ce0f42c 100644 --- a/src/core/config/into_document.rs +++ b/src/core/config/into_document.rs @@ -9,6 +9,11 @@ use crate::core::directive::DirectiveCodec; fn pos(a: A) -> Positioned { Positioned::new(a, Pos::default()) } + +fn transform_default_value(value: Option) -> Option { + value.map(ConstValue::from_json).and_then(Result::ok) +} + fn config_document(config: &ConfigModule) -> ServiceDocument { let mut definitions = Vec::new(); let mut directives = vec![ @@ -112,7 +117,8 @@ fn config_document(config: &ConfigModule) -> ServiceDocument { name: pos(Name::new(name.clone())), ty: pos(Type { nullable: 
!field.required, base: base_type }), - default_value: None, + default_value: transform_default_value(field.default_value.clone()) + .map(pos), directives, }) }) @@ -167,10 +173,10 @@ fn config_document(config: &ConfigModule) -> ServiceDocument { name: pos(Name::new(name.clone())), ty: pos(Type { nullable: !arg.required, base: base_type }), - default_value: arg - .default_value - .clone() - .map(|v| pos(ConstValue::String(v.to_string()))), + default_value: transform_default_value( + arg.default_value.clone(), + ) + .map(pos), directives: Vec::new(), }) }) @@ -242,8 +248,11 @@ fn config_document(config: &ConfigModule) -> ServiceDocument { .map(|variant| { pos(EnumValueDefinition { description: None, - value: pos(Name::new(variant)), - directives: Vec::new(), + value: pos(Name::new(&variant.name)), + directives: variant + .alias + .clone() + .map_or(vec![], |v| vec![pos(v.to_directive())]), }) }) .collect(), diff --git a/src/core/config/server.rs b/src/core/config/server.rs index 53d57b9867..e336438ba1 100644 --- a/src/core/config/server.rs +++ b/src/core/config/server.rs @@ -29,6 +29,15 @@ pub struct Server { /// debugging. Use judiciously. @default `false`. pub batch_requests: Option, + #[serde(default, skip_serializing_if = "is_default")] + /// Enables deduplication of IO operations to enhance performance. + /// + /// This flag prevents duplicate IO requests from being executed + /// concurrently, reducing resource load. Caution: May lead to issues + /// with APIs that expect unique results for identical inputs, such as + /// nonce-based APIs. 
+ pub dedupe: Option, + #[serde(default, skip_serializing_if = "is_default")] /// `headers` contains key-value pairs that are included as default headers /// in server responses, allowing for consistent header management across @@ -198,6 +207,10 @@ impl Server { pub fn get_pipeline_flush(&self) -> bool { self.pipeline_flush.unwrap_or(true) } + + pub fn get_dedupe(&self) -> bool { + self.dedupe.unwrap_or(false) + } } #[cfg(test)] diff --git a/src/core/config/transformer/snapshots/tailcall__core__config__transformer__type_name_generator__test__type_name_generator.snap b/src/core/config/transformer/snapshots/tailcall__core__config__transformer__type_name_generator__test__type_name_generator.snap new file mode 100644 index 0000000000..c24afeaddc --- /dev/null +++ b/src/core/config/transformer/snapshots/tailcall__core__config__transformer__type_name_generator__test__type_name_generator.snap @@ -0,0 +1,26 @@ +--- +source: src/core/config/transformer/type_name_generator.rs +expression: transformed_config.to_sdl() +--- +schema @server(hostname: "0.0.0.0", port: 8000) @upstream(baseURL: "http://example.typicode.com", httpCache: 42) { + query: Query +} + +type Color { + colors: [T3] + isColorPageExists: Boolean + isColorsImageAvailable: Boolean +} + +type F1 { + color: Color +} + +type Query { + f1: F1 @http(path: "/colors") +} + +type T3 { + hexCode: String + name: String +} diff --git a/src/core/config/transformer/type_name_generator.rs b/src/core/config/transformer/type_name_generator.rs index a81b2feced..92bb52a4fe 100644 --- a/src/core/config/transformer/type_name_generator.rs +++ b/src/core/config/transformer/type_name_generator.rs @@ -40,14 +40,16 @@ impl<'a> CandidateConvergence<'a> { if let Some((candidate_name, _)) = candidate_list .iter() .filter(|(candidate_name, _)| { - !converged_candidate_set.contains(candidate_name) - && !self.config.types.contains_key(*candidate_name) + let singularized_candidate_name = candidate_name.to_singular().to_pascal_case(); + 
!converged_candidate_set.contains(&singularized_candidate_name) + && !self.config.types.contains_key(&singularized_candidate_name) }) .max_by_key(|&(_, candidate)| (candidate.frequency, candidate.priority)) { let singularized_candidate_name = candidate_name.to_singular().to_pascal_case(); - finalized_candidates.insert(type_name.to_owned(), singularized_candidate_name); - converged_candidate_set.insert(candidate_name); + finalized_candidates + .insert(type_name.to_owned(), singularized_candidate_name.clone()); + converged_candidate_set.insert(singularized_candidate_name); } } @@ -178,4 +180,16 @@ mod test { Ok(()) } + + #[test] + fn test_type_name_generator() -> anyhow::Result<()> { + let config = Config::from_sdl(read_fixture(configs::NAME_GENERATION).as_str()) + .to_result() + .unwrap(); + + let transformed_config = TypeNameGenerator.transform(config).to_result().unwrap(); + insta::assert_snapshot!(transformed_config.to_sdl()); + + Ok(()) + } } diff --git a/src/core/config/upstream.rs b/src/core/config/upstream.rs index d8779e84e2..11b8c5d896 100644 --- a/src/core/config/upstream.rs +++ b/src/core/config/upstream.rs @@ -140,11 +140,6 @@ pub struct Upstream { /// The User-Agent header value to be used in HTTP requests. @default /// `Tailcall/1.0` pub user_agent: Option, - - #[serde(default, skip_serializing_if = "is_default")] - /// When set to `true`, it will ensure no HTTP, GRPC, or any other IO call - /// is made more than once within the context of a single GraphQL request. 
- pub dedupe: Option, } impl Upstream { @@ -197,10 +192,6 @@ impl Upstream { self.http2_only.unwrap_or(false) } - pub fn get_dedupe(&self) -> bool { - self.dedupe.unwrap_or(false) - } - pub fn get_on_request(&self) -> Option { self.on_request.clone() } diff --git a/src/core/data_loader/dedupe.rs b/src/core/data_loader/dedupe.rs new file mode 100644 index 0000000000..ba5dd5edfd --- /dev/null +++ b/src/core/data_loader/dedupe.rs @@ -0,0 +1,188 @@ +use std::collections::HashMap; +use std::hash::Hash; +use std::sync::{Arc, Mutex}; + +use futures_util::Future; +use tokio::sync::broadcast; + +pub trait Key: Send + Sync + Eq + Hash + Clone {} +impl Key for A {} + +pub trait Value: Send + Sync + Clone {} +impl Value for A {} + +pub struct Dedupe { + cache: Arc>>>, + size: usize, + persist: bool, +} + +enum State { + Value(Value), + Send(broadcast::Sender), +} + +enum Step { + Value(Value), + Recv(broadcast::Receiver), + Send(broadcast::Sender), +} + +impl Dedupe { + pub fn new(size: usize, persist: bool) -> Self { + Self { cache: Arc::new(Mutex::new(HashMap::new())), size, persist } + } + + pub async fn dedupe<'a, Fn, Fut>(&'a self, key: &'a K, or_else: Fn) -> V + where + Fn: FnOnce() -> Fut, + Fut: Future, + { + match self.step(key) { + Step::Value(value) => value, + Step::Recv(mut rx) => rx.recv().await.unwrap(), + Step::Send(tx) => { + let value = or_else().await; + let mut guard = self.cache.lock().unwrap(); + if self.persist { + guard.insert(key.to_owned(), State::Value(value.clone())); + } else { + guard.remove(key); + } + let _ = tx.send(value.clone()); + value + } + } + } + + fn step(&self, key: &K) -> Step { + let mut this = self.cache.lock().unwrap(); + match this.get(key) { + Some(state) => match state { + State::Value(value) => Step::Value(value.clone()), + State::Send(tx) => Step::Recv(tx.subscribe()), + }, + None => { + let (tx, _) = broadcast::channel(self.size); + this.insert(key.to_owned(), State::Send(tx.clone())); + Step::Send(tx.clone()) + } + } + } +} 
+ +pub struct DedupeResult(Dedupe>); + +impl DedupeResult { + pub fn new(persist: bool) -> Self { + Self(Dedupe::new(1, persist)) + } +} + +impl DedupeResult { + pub async fn dedupe<'a, Fn, Fut>(&'a self, key: &'a K, or_else: Fn) -> Result + where + Fn: FnOnce() -> Fut, + Fut: Future>, + { + self.0.dedupe(key, or_else).await + } +} + +#[cfg(test)] +mod tests { + use std::sync::atomic::{AtomicUsize, Ordering}; + use std::time::Duration; + + use tokio::join; + use tokio::time::sleep; + + use super::*; + + #[tokio::test] + async fn test_no_key() { + let cache = Arc::new(Dedupe::::new(1000, true)); + let actual = cache.dedupe(&1, || Box::pin(async { 1 })).await; + pretty_assertions::assert_eq!(actual, 1); + } + + #[tokio::test] + async fn test_with_key() { + let cache = Arc::new(Dedupe::::new(1000, true)); + cache.dedupe(&1, || Box::pin(async { 1 })).await; + + let actual = cache.dedupe(&1, || Box::pin(async { 2 })).await; + pretty_assertions::assert_eq!(actual, 1); + } + + #[tokio::test] + async fn test_with_multi_get() { + let cache = Arc::new(Dedupe::::new(1000, true)); + + for i in 0..100 { + cache.dedupe(&1, || Box::pin(async move { i })).await; + } + + let actual = cache.dedupe(&1, || Box::pin(async { 2 })).await; + pretty_assertions::assert_eq!(actual, 0); + } + + #[tokio::test] + async fn test_with_multi_async_get() { + let cache = Arc::new(Dedupe::::new(1000, true)); + + let a = cache.dedupe(&1, || { + Box::pin(async move { + sleep(Duration::from_millis(1)).await; + 1 + }) + }); + let b = cache.dedupe(&1, || { + Box::pin(async move { + sleep(Duration::from_millis(1)).await; + 2 + }) + }); + let (a, b) = join!(a, b); + + pretty_assertions::assert_eq!(a, b); + } + + async fn compute_value(counter: Arc) -> String { + counter.fetch_add(1, Ordering::SeqCst); + sleep(Duration::from_millis(1)).await; + format!("value_{}", counter.load(Ordering::SeqCst)) + } + + #[tokio::test(worker_threads = 16, flavor = "multi_thread")] + async fn test_deadlock_scenario() { + let _ 
= tracing_subscriber::fmt(); + let cache = Arc::new(Dedupe::::new(1000, true)); + let key = 1; + let counter = Arc::new(AtomicUsize::new(0)); + let mut handles = Vec::new(); + + // Spawn multiple tasks to simulate concurrent access + for i in 0..1000000 { + let cache = cache.clone(); + let counter = counter.clone(); + let handle = tokio::task::spawn(async move { + let result = cache + .dedupe(&key, || Box::pin(compute_value(counter))) + .await; + (i, result) + }); + handles.push(handle); + } + // Await each task for any potential deadlocks + for handle in handles.into_iter() { + let _ = handle.await.unwrap(); + } + // Check that compute_value was called exactly once + assert_eq!( + counter.load(Ordering::SeqCst), + 1, + "compute_value was called more than once" + ); + } +} diff --git a/src/core/data_loader/mod.rs b/src/core/data_loader/mod.rs index 14749d9b76..231a030fa1 100644 --- a/src/core/data_loader/mod.rs +++ b/src/core/data_loader/mod.rs @@ -1,8 +1,10 @@ mod cache; mod data_loader; +mod dedupe; mod factory; mod loader; mod storage; pub use data_loader::DataLoader; +pub use dedupe::DedupeResult; pub use loader::Loader; diff --git a/src/core/document.rs b/src/core/document.rs index 3f9cfb85fd..a5de48bd09 100644 --- a/src/core/document.rs +++ b/src/core/document.rs @@ -174,7 +174,7 @@ fn print_type_def(type_def: &TypeDefinition) -> String { directives, en.values .iter() - .map(|v| format!(" {}", v.node.value)) + .map(|v| print_enum_value(&v.node)) .collect::>() .join("\n") ); @@ -189,6 +189,15 @@ fn print_type_def(type_def: &TypeDefinition) -> String { } } +fn print_enum_value(value: &async_graphql::parser::types::EnumValueDefinition) -> String { + let directives_str = print_directives(&value.directives); + if directives_str.is_empty() { + format!(" {}", value.value) + } else { + format!(" {} {}", value.value, directives_str) + } +} + fn print_field(field: &async_graphql::parser::types::FieldDefinition) -> String { let directives = 
print_directives(&field.directives); let args_str = if !field.arguments.is_empty() { @@ -197,7 +206,13 @@ fn print_field(field: &async_graphql::parser::types::FieldDefinition) -> String .iter() .map(|arg| { let nullable = if arg.node.ty.node.nullable { "" } else { "!" }; - format!("{}: {}{}", arg.node.name, arg.node.ty.node.base, nullable) + format!( + "{}: {}{}{}", + arg.node.name, + arg.node.ty.node.base, + nullable, + print_default_value(arg.node.default_value.as_ref()) + ) }) .collect::>() .join(", "); @@ -215,14 +230,25 @@ fn print_field(field: &async_graphql::parser::types::FieldDefinition) -> String doc + node.trim_end() } +fn print_default_value(value: Option<&Positioned>) -> String { + value + .as_ref() + .map(|val| format!(" = {val}")) + .unwrap_or_default() +} + fn print_input_value(field: &async_graphql::parser::types::InputValueDefinition) -> String { let directives_str = print_directives(&field.directives); let doc = field.description.as_ref().map_or(String::new(), |d| { format!(r#" """{} {}{} """{}"#, "\n", d.node, "\n", "\n") }); format!( - "{} {}: {}{}", - doc, field.name.node, field.ty.node, directives_str + "{} {}: {}{}{}", + doc, + field.name.node, + field.ty.node, + directives_str, + print_default_value(field.default_value.as_ref()) ) } fn print_directive(directive: &DirectiveDefinition) -> String { diff --git a/src/core/error.rs b/src/core/error.rs index 92ba8f7dfc..3a26d8d5b0 100644 --- a/src/core/error.rs +++ b/src/core/error.rs @@ -6,6 +6,7 @@ use prost_reflect::DescriptorError; use super::config::UnsupportedConfigFormat; use super::grpc::error::Error as GrpcError; +use super::ir; use super::rest::error::Error as RestError; use super::valid::ValidationError; use crate::cli::error::Error as CLIError; @@ -113,6 +114,9 @@ pub enum Error { #[error("Inquire Error")] Inquire(InquireError), + + #[error("IRError {0}")] + IRError(ir::Error), } pub mod file { @@ -216,6 +220,9 @@ pub mod http { url: String, spec_path: String, }, + + #[error("Hyper 
HTTP Error")] + Hyper(hyper::Error), } } diff --git a/src/core/generator/from_proto.rs b/src/core/generator/from_proto.rs index 0820c64468..87b48c5071 100644 --- a/src/core/generator/from_proto.rs +++ b/src/core/generator/from_proto.rs @@ -9,7 +9,7 @@ use super::graphql_type::{GraphQLType, Unparsed}; use super::proto::comments_builder::CommentsBuilder; use super::proto::path_builder::PathBuilder; use super::proto::path_field::PathField; -use crate::core::config::{Arg, Config, Enum, Field, Grpc, Tag, Type}; +use crate::core::config::{Arg, Config, Enum, Field, Grpc, Tag, Type, Variant}; use crate::core::error::Error; /// Assists in the mapping and retrieval of proto type names to custom formatted @@ -103,6 +103,11 @@ impl Context { let doc = self.comments_builder.get_comments(&enum_type_path); + let variants_with_comments = variants_with_comments + .into_iter() + .map(|v| Variant { name: v, alias: None }) + .collect(); + self.config .enums .insert(type_name, Enum { variants: variants_with_comments, doc }); diff --git a/src/core/has_headers.rs b/src/core/has_headers.rs index e285caabf3..25c6d44d76 100644 --- a/src/core/has_headers.rs +++ b/src/core/has_headers.rs @@ -6,7 +6,7 @@ pub trait HasHeaders { fn headers(&self) -> &HeaderMap; } -impl<'a, Ctx: ResolverContextLike<'a>> HasHeaders for EvaluationContext<'a, Ctx> { +impl<'a, Ctx: ResolverContextLike> HasHeaders for EvaluationContext<'a, Ctx> { fn headers(&self) -> &HeaderMap { self.headers() } diff --git a/src/core/http/request_context.rs b/src/core/http/request_context.rs index 7ce8a9456e..df7d40dc26 100644 --- a/src/core/http/request_context.rs +++ b/src/core/http/request_context.rs @@ -7,10 +7,9 @@ use cache_control::{Cachability, CacheControl}; use derive_setters::Setters; use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; -use crate::core::async_cache::AsyncCache; use crate::core::auth::context::AuthContext; use crate::core::blueprint::{Server, Upstream}; -use crate::core::data_loader::DataLoader; +use 
crate::core::data_loader::{DataLoader, DedupeResult}; use crate::core::graphql::GraphqlDataLoader; use crate::core::grpc; use crate::core::grpc::data_loader::GrpcDataLoader; @@ -34,7 +33,8 @@ pub struct RequestContext { pub min_max_age: Arc>>, pub cache_public: Arc>>, pub runtime: TargetRuntime, - pub cache: AsyncCache, + pub cache: DedupeResult, + pub dedupe_handler: Arc>, } impl RequestContext { @@ -50,7 +50,8 @@ impl RequestContext { min_max_age: Arc::new(Mutex::new(None)), cache_public: Arc::new(Mutex::new(None)), runtime: target_runtime, - cache: AsyncCache::new(), + cache: DedupeResult::new(true), + dedupe_handler: Arc::new(DedupeResult::new(false)), allowed_headers: HeaderMap::new(), auth_ctx: AuthContext::default(), } @@ -200,7 +201,8 @@ impl From<&AppContext> for RequestContext { min_max_age: Arc::new(Mutex::new(None)), cache_public: Arc::new(Mutex::new(None)), runtime: app_ctx.runtime.clone(), - cache: AsyncCache::new(), + cache: DedupeResult::new(true), + dedupe_handler: app_ctx.dedupe_handler.clone(), } } } diff --git a/src/core/http/request_handler.rs b/src/core/http/request_handler.rs index 86200c14a3..2f975116ed 100644 --- a/src/core/http/request_handler.rs +++ b/src/core/http/request_handler.rs @@ -102,17 +102,26 @@ pub async fn graphql_request( ) -> Result, Error> { req_counter.set_http_route("/graphql"); let req_ctx = Arc::new(create_request_context(&req, app_ctx)); - let bytes = hyper::body::to_bytes(req.into_body()).await?; + let (req, body) = req.into_parts(); + let bytes = hyper::body::to_bytes(body).await?; let graphql_request = serde_json::from_slice::(&bytes); match graphql_request { Ok(mut request) => { - let _ = request.parse_query(); - let mut response = request.data(req_ctx.clone()).execute(&app_ctx.schema).await; - - response = update_cache_control_header(response, app_ctx, req_ctx.clone()); - let mut resp = response.into_response()?; - update_response_headers(&mut resp, &req_ctx, app_ctx); - Ok(resp) + if 
!(app_ctx.blueprint.server.dedupe && request.is_query()) { + Ok(execute_query(&app_ctx, &req_ctx, request).await?) + } else { + let operation_id = request.operation_id(&req.headers); + let out = app_ctx + .dedupe_operation_handler + .dedupe(&operation_id, || { + Box::pin(async move { + let resp = execute_query(&app_ctx, &req_ctx, request).await?; + Ok(crate::core::http::Response::from_hyper(resp).await?) + }) + }) + .await?; + Ok(hyper::Response::from(out)) + } } Err(err) => { tracing::error!( @@ -130,6 +139,19 @@ pub async fn graphql_request( } } +async fn execute_query( + app_ctx: &&AppContext, + req_ctx: &Arc, + request: T, +) -> Result, Error> { + let mut response = request.data(req_ctx.clone()).execute(&app_ctx.schema).await; + + response = update_cache_control_header(response, app_ctx, req_ctx.clone()); + let mut resp = response.into_response()?; + update_response_headers(&mut resp, req_ctx, app_ctx); + Ok(resp) +} + fn create_allowed_headers(headers: &HeaderMap, allowed: &BTreeSet) -> HeaderMap { let mut new_headers = HeaderMap::new(); for (k, v) in headers.iter() { diff --git a/src/core/http/request_template.rs b/src/core/http/request_template.rs index fb256fbe35..683b5e3f7f 100644 --- a/src/core/http/request_template.rs +++ b/src/core/http/request_template.rs @@ -231,17 +231,14 @@ impl CacheKey for RequestTemplate { self.method.hash(state); - let mut headers = vec![]; for (name, mustache) in self.headers.iter() { name.hash(state); mustache.render(ctx).hash(state); - headers.push((name.to_string(), mustache.render(ctx))); } for (name, value) in ctx.headers().iter() { name.hash(state); value.hash(state); - headers.push((name.to_string(), value.to_str().unwrap().to_string())); } if let Some(body) = self.body_path.as_ref() { diff --git a/src/core/http/response.rs b/src/core/http/response.rs index 95bb119766..71c1cfa1da 100644 --- a/src/core/http/response.rs +++ b/src/core/http/response.rs @@ -2,6 +2,7 @@ use anyhow::Result; use 
async_graphql_value::{ConstValue, Name}; use derive_setters::Setters; use hyper::body::Bytes; +use hyper::Body; use indexmap::IndexMap; use prost::Message; use tonic::Status; @@ -55,6 +56,13 @@ impl Response { Ok(Response { status, headers, body }) } + pub async fn from_hyper(resp: hyper::Response) -> Result { + let status = resp.status(); + let headers = resp.headers().to_owned(); + let body = hyper::body::to_bytes(resp.into_body()).await?; + Ok(Response { status, headers, body }) + } + pub fn empty() -> Self { Response { status: reqwest::StatusCode::OK, @@ -149,3 +157,12 @@ impl Response { }) } } + +impl From> for hyper::Response { + fn from(resp: Response) -> Self { + let mut response = hyper::Response::new(Body::from(resp.body)); + *response.headers_mut() = resp.headers; + *response.status_mut() = resp.status; + response + } +} diff --git a/src/core/ir/cache.rs b/src/core/ir/cache.rs index 1a8bc91aa8..c9c27dfe24 100644 --- a/src/core/ir/cache.rs +++ b/src/core/ir/cache.rs @@ -1,7 +1,5 @@ -use core::future::Future; use std::num::NonZeroU64; use std::ops::Deref; -use std::pin::Pin; use async_graphql_value::ConstValue; @@ -42,31 +40,29 @@ impl Cache { } impl Eval for Cache { - fn eval<'a, Ctx: ResolverContextLike<'a> + Sync + Send>( - &'a self, - ctx: EvaluationContext<'a, Ctx>, - ) -> Pin> + 'a + Send>> { - Box::pin(async move { - if let IR::IO(io) = self.expr.deref() { - let key = io.cache_key(&ctx); - if let Some(key) = key { - if let Some(val) = ctx.request_ctx.runtime.cache.get(&key).await? { - Ok(val) - } else { - let val = self.expr.eval(ctx.clone()).await?; - ctx.request_ctx - .runtime - .cache - .set(key, val.clone(), self.max_age) - .await?; - Ok(val) - } + async fn eval(&self, ctx: &mut EvaluationContext<'_, Ctx>) -> Result + where + Ctx: ResolverContextLike + Sync, + { + if let IR::IO(io) = self.expr.deref() { + let key = io.cache_key(ctx); + if let Some(key) = key { + if let Some(val) = ctx.request_ctx.runtime.cache.get(&key).await? 
{ + Ok(val) } else { - self.expr.eval(ctx).await + let val = self.expr.eval(ctx).await?; + ctx.request_ctx + .runtime + .cache + .set(key, val.clone(), self.max_age) + .await?; + Ok(val) } } else { - Ok(self.expr.eval(ctx).await?) + self.expr.eval(ctx).await } - }) + } else { + Ok(self.expr.eval(ctx).await?) + } } } diff --git a/src/core/ir/error.rs b/src/core/ir/error.rs index c444d817ff..cc181748a6 100644 --- a/src/core/ir/error.rs +++ b/src/core/ir/error.rs @@ -19,6 +19,7 @@ pub enum Error { APIValidationError(Vec), + // FIXME: Use specific error types instead of string Other(String), DeserializeError(String), diff --git a/src/core/ir/eval.rs b/src/core/ir/eval.rs index 91a067a23c..924eaba92a 100644 --- a/src/core/ir/eval.rs +++ b/src/core/ir/eval.rs @@ -1,16 +1,12 @@ -use core::future::Future; -use std::pin::Pin; +use std::future::Future; use super::{Error, EvaluationContext, ResolverContextLike}; -pub trait Eval -where - Self: Send + Sync, -{ - fn eval<'a, Ctx: ResolverContextLike<'a> + Sync + Send>( - &'a self, - ctx: EvaluationContext<'a, Ctx>, - ) -> Pin> + 'a + Send>> +pub trait Eval { + fn eval( + &self, + ctx: &mut EvaluationContext<'_, Ctx>, + ) -> impl Future> where - Output: 'a; + Ctx: ResolverContextLike + Sync; } diff --git a/src/core/ir/evaluation_context.rs b/src/core/ir/evaluation_context.rs index 7969b798bd..0cb35f9e8a 100644 --- a/src/core/ir/evaluation_context.rs +++ b/src/core/ir/evaluation_context.rs @@ -10,7 +10,7 @@ use crate::core::http::RequestContext; // TODO: rename to ResolverContext #[derive(Clone)] -pub struct EvaluationContext<'a, Ctx: ResolverContextLike<'a>> { +pub struct EvaluationContext<'a, Ctx: ResolverContextLike> { // Context create for each GraphQL Request pub request_ctx: &'a RequestContext, @@ -25,7 +25,7 @@ pub struct EvaluationContext<'a, Ctx: ResolverContextLike<'a>> { graphql_ctx_args: Option>, } -impl<'a, A: ResolverContextLike<'a>> EvaluationContext<'a, A> { +impl<'a, A: ResolverContextLike> EvaluationContext<'a, 
A> { pub fn with_value(&self, value: Value) -> EvaluationContext<'a, A> { let mut ctx = self.clone(); ctx.graphql_ctx_value = Some(Arc::new(value)); @@ -37,9 +37,13 @@ impl<'a, A: ResolverContextLike<'a>> EvaluationContext<'a, A> { ctx.graphql_ctx_args = Some(Arc::new(args)); ctx } + + pub fn is_query(&self) -> bool { + self.graphql_ctx.is_query() + } } -impl<'a, Ctx: ResolverContextLike<'a>> EvaluationContext<'a, Ctx> { +impl<'a, Ctx: ResolverContextLike> EvaluationContext<'a, Ctx> { pub fn new(req_ctx: &'a RequestContext, graphql_ctx: &'a Ctx) -> EvaluationContext<'a, Ctx> { Self { request_ctx: req_ctx, @@ -105,7 +109,7 @@ impl<'a, Ctx: ResolverContextLike<'a>> EvaluationContext<'a, Ctx> { } } -impl<'a, Ctx: ResolverContextLike<'a>> GraphQLOperationContext for EvaluationContext<'a, Ctx> { +impl<'a, Ctx: ResolverContextLike> GraphQLOperationContext for EvaluationContext<'a, Ctx> { fn selection_set(&self) -> Option { let selection_set = self.graphql_ctx.field()?.selection_set(); diff --git a/src/core/ir/io.rs b/src/core/ir/io.rs index 6add3a65f7..7cc62b64c4 100644 --- a/src/core/ir/io.rs +++ b/src/core/ir/io.rs @@ -1,5 +1,3 @@ -use core::future::Future; -use std::pin::Pin; use std::sync::Arc; use async_graphql::from_value; @@ -60,108 +58,109 @@ impl DataLoaderId { } impl Eval for IO { - fn eval<'a, Ctx: super::ResolverContextLike<'a> + Sync + Send>( - &'a self, - ctx: super::EvaluationContext<'a, Ctx>, - ) -> Pin> + 'a + Send>> { - if ctx.request_ctx.upstream.dedupe { - Box::pin(async move { - let key = self.cache_key(&ctx); - if let Some(key) = key { + async fn eval(&self, ctx: &mut EvaluationContext<'_, Ctx>) -> Result + where + Ctx: ResolverContextLike + Sync, + { + // Note: Handled the case separately for performance reasons. 
It avoids cache + // key generation when it's not required + if !ctx.request_ctx.server.dedupe || !ctx.is_query() { + return self.eval_inner(ctx).await; + } + if let Some(key) = self.cache_key(ctx) { + ctx.request_ctx + .cache + .dedupe(&key, || async { ctx.request_ctx - .cache - .get_or_eval(key, move || Box::pin(self.eval_inner(ctx))) + .dedupe_handler + .dedupe(&key, || self.eval_inner(ctx)) .await - .as_ref() - .clone() - } else { - self.eval_inner(ctx).await - } - }) + }) + .await } else { - Box::pin(self.eval_inner(ctx)) + self.eval_inner(ctx).await } } } impl IO { - fn eval_inner<'a, Ctx: super::ResolverContextLike<'a> + Sync + Send>( - &'a self, - ctx: super::EvaluationContext<'a, Ctx>, - ) -> Pin> + 'a + Send>> { - Box::pin(async move { - match self { - IO::Http { req_template, dl_id, http_filter, .. } => { - let worker = &ctx.request_ctx.runtime.cmd_worker; - let executor = HttpRequestExecutor::new(ctx, req_template, dl_id); - let request = executor.init_request()?; - let response = match (&worker, http_filter) { - (Some(worker), Some(http_filter)) => { - executor - .execute_with_worker(request, worker, http_filter) - .await? - } - _ => executor.execute(request).await?, - }; - - Ok(response.body) - } - IO::GraphQL { req_template, field_name, dl_id, .. } => { - let req = req_template.to_request(&ctx)?; + async fn eval_inner( + &self, + ctx: &mut EvaluationContext<'_, Ctx>, + ) -> Result + where + Ctx: ResolverContextLike + Sync, + { + match self { + IO::Http { req_template, dl_id, http_filter, .. } => { + let worker = &ctx.request_ctx.runtime.cmd_worker; + let executor = HttpRequestExecutor::new(ctx, req_template, dl_id); + let request = executor.init_request()?; + let response = match (&worker, http_filter) { + (Some(worker), Some(http_filter)) => { + executor + .execute_with_worker(request, worker, http_filter) + .await? 
+ } + _ => executor.execute(request).await?, + }; - let res = if ctx.request_ctx.upstream.batch.is_some() - && matches!(req_template.operation_type, GraphQLOperationType::Query) - { - let data_loader: Option<&DataLoader> = - dl_id.and_then(|index| ctx.request_ctx.gql_data_loaders.get(index.0)); - execute_request_with_dl(&ctx, req, data_loader).await? - } else { - execute_raw_request(&ctx, req).await? - }; + Ok(response.body) + } + IO::GraphQL { req_template, field_name, dl_id, .. } => { + let req = req_template.to_request(ctx)?; + + let res = if ctx.request_ctx.upstream.batch.is_some() + && matches!(req_template.operation_type, GraphQLOperationType::Query) + { + let data_loader: Option<&DataLoader> = + dl_id.and_then(|index| ctx.request_ctx.gql_data_loaders.get(index.0)); + execute_request_with_dl(ctx, req, data_loader).await? + } else { + execute_raw_request(ctx, req).await? + }; - set_headers(&ctx, &res); - parse_graphql_response(&ctx, res, field_name) - } - IO::Grpc { req_template, dl_id, .. } => { - let rendered = req_template.render(&ctx)?; + set_headers(ctx, &res); + parse_graphql_response(ctx, res, field_name) + } + IO::Grpc { req_template, dl_id, .. } => { + let rendered = req_template.render(ctx)?; - let res = if ctx.request_ctx.upstream.batch.is_some() && + let res = if ctx.request_ctx.upstream.batch.is_some() && // TODO: share check for operation_type for resolvers matches!(req_template.operation_type, GraphQLOperationType::Query) - { - let data_loader: Option< - &DataLoader, - > = dl_id.and_then(|index| ctx.request_ctx.grpc_data_loaders.get(index.0)); - execute_grpc_request_with_dl(&ctx, rendered, data_loader).await? - } else { - let req = rendered.to_request()?; - execute_raw_grpc_request(&ctx, req, &req_template.operation).await? - }; + { + let data_loader: Option<&DataLoader> = + dl_id.and_then(|index| ctx.request_ctx.grpc_data_loaders.get(index.0)); + execute_grpc_request_with_dl(ctx, rendered, data_loader).await? 
+ } else { + let req = rendered.to_request()?; + execute_raw_grpc_request(ctx, req, &req_template.operation).await? + }; - set_headers(&ctx, &res); + set_headers(ctx, &res); - Ok(res.body) - } - IO::Js { name } => { - if let Some((worker, value)) = ctx - .request_ctx - .runtime - .worker - .as_ref() - .zip(ctx.value().cloned()) - { - let val = worker.call(name, value).await?; - Ok(val.unwrap_or_default()) - } else { - Ok(ConstValue::Null) - } + Ok(res.body) + } + IO::Js { name } => { + if let Some((worker, value)) = ctx + .request_ctx + .runtime + .worker + .as_ref() + .zip(ctx.value().cloned()) + { + let val = worker.call(name, value).await?; + Ok(val.unwrap_or_default()) + } else { + Ok(ConstValue::Null) } } - }) + } } } -impl<'a, Ctx: ResolverContextLike<'a> + Sync + Send> CacheKey> for IO { +impl<'a, Ctx: ResolverContextLike + Sync> CacheKey> for IO { fn cache_key(&self, ctx: &EvaluationContext<'a, Ctx>) -> Option { match self { IO::Http { req_template, .. } => req_template.cache_key(ctx), @@ -172,8 +171,8 @@ impl<'a, Ctx: ResolverContextLike<'a> + Sync + Send> CacheKey>( - ctx: &EvaluationContext<'ctx, Ctx>, +fn set_headers( + ctx: &EvaluationContext<'_, Ctx>, res: &Response, ) { set_cache_control(ctx, res); @@ -181,8 +180,8 @@ fn set_headers<'ctx, Ctx: ResolverContextLike<'ctx>>( set_experimental_headers(ctx, res); } -fn set_cache_control<'ctx, Ctx: ResolverContextLike<'ctx>>( - ctx: &EvaluationContext<'ctx, Ctx>, +fn set_cache_control( + ctx: &EvaluationContext<'_, Ctx>, res: &Response, ) { if ctx.request_ctx.server.get_enable_cache_control() && res.status.is_success() { @@ -192,15 +191,15 @@ fn set_cache_control<'ctx, Ctx: ResolverContextLike<'ctx>>( } } -fn set_experimental_headers<'ctx, Ctx: ResolverContextLike<'ctx>>( - ctx: &EvaluationContext<'ctx, Ctx>, +fn set_experimental_headers( + ctx: &EvaluationContext<'_, Ctx>, res: &Response, ) { ctx.request_ctx.add_x_headers(&res.headers); } -fn set_cookie_headers<'ctx, Ctx: ResolverContextLike<'ctx>>( - ctx: 
&EvaluationContext<'ctx, Ctx>, +fn set_cookie_headers( + ctx: &EvaluationContext<'_, Ctx>, res: &Response, ) { if res.status.is_success() { @@ -208,8 +207,8 @@ fn set_cookie_headers<'ctx, Ctx: ResolverContextLike<'ctx>>( } } -async fn execute_raw_request<'ctx, Ctx: ResolverContextLike<'ctx>>( - ctx: &EvaluationContext<'ctx, Ctx>, +async fn execute_raw_request( + ctx: &EvaluationContext<'_, Ctx>, req: Request, ) -> Result, Error> { let response = ctx @@ -224,8 +223,8 @@ async fn execute_raw_request<'ctx, Ctx: ResolverContextLike<'ctx>>( Ok(response) } -async fn execute_raw_grpc_request<'ctx, Ctx: ResolverContextLike<'ctx>>( - ctx: &EvaluationContext<'ctx, Ctx>, +async fn execute_raw_grpc_request( + ctx: &EvaluationContext<'_, Ctx>, req: Request, operation: &ProtobufOperation, ) -> Result, Error> { @@ -235,11 +234,10 @@ async fn execute_raw_grpc_request<'ctx, Ctx: ResolverContextLike<'ctx>>( } async fn execute_grpc_request_with_dl< - 'ctx, - Ctx: ResolverContextLike<'ctx>, + Ctx: ResolverContextLike, Dl: Loader, Error = Arc>, >( - ctx: &EvaluationContext<'ctx, Ctx>, + ctx: &EvaluationContext<'_, Ctx>, rendered: RenderedRequestTemplate, data_loader: Option<&DataLoader>, ) -> Result, Error> { @@ -262,7 +260,7 @@ async fn execute_grpc_request_with_dl< async fn execute_request_with_dl< 'ctx, - Ctx: ResolverContextLike<'ctx>, + Ctx: ResolverContextLike, Dl: Loader, Error = Arc>, >( ctx: &EvaluationContext<'ctx, Ctx>, @@ -286,8 +284,8 @@ async fn execute_request_with_dl< .unwrap_or_default()) } -fn parse_graphql_response<'ctx, Ctx: ResolverContextLike<'ctx>>( - ctx: &EvaluationContext<'ctx, Ctx>, +fn parse_graphql_response( + ctx: &EvaluationContext<'_, Ctx>, res: Response, field_name: &str, ) -> Result { @@ -310,15 +308,15 @@ fn parse_graphql_response<'ctx, Ctx: ResolverContextLike<'ctx>>( /// and getting a response. There are optimizations and customizations that the /// user might have configured. HttpRequestExecutor is responsible for handling /// all of that. 
-struct HttpRequestExecutor<'a, Context: ResolverContextLike<'a> + Send + Sync> { - evaluation_ctx: EvaluationContext<'a, Context>, +struct HttpRequestExecutor<'a, 'ctx, Context: ResolverContextLike + Sync> { + evaluation_ctx: &'ctx EvaluationContext<'a, Context>, data_loader: Option<&'a DataLoader>, request_template: &'a http::RequestTemplate, } -impl<'a, Context: ResolverContextLike<'a> + Send + Sync> HttpRequestExecutor<'a, Context> { +impl<'a, 'ctx, Context: ResolverContextLike + Sync> HttpRequestExecutor<'a, 'ctx, Context> { pub fn new( - evaluation_ctx: EvaluationContext<'a, Context>, + evaluation_ctx: &'ctx EvaluationContext<'a, Context>, request_template: &'a RequestTemplate, id: &Option, ) -> Self { @@ -332,8 +330,7 @@ impl<'a, Context: ResolverContextLike<'a> + Send + Sync> HttpRequestExecutor<'a, } pub fn init_request(&self) -> Result { - let ctx = &self.evaluation_ctx; - Ok(self.request_template.to_request(ctx)?) + Ok(self.request_template.to_request(self.evaluation_ctx)?) } async fn execute(&self, req: Request) -> Result, Error> { diff --git a/src/core/ir/mod.rs b/src/core/ir/mod.rs index 770c056d85..573c4a949a 100644 --- a/src/core/ir/mod.rs +++ b/src/core/ir/mod.rs @@ -7,9 +7,9 @@ mod io; mod modify; mod resolver_context_like; -use core::future::Future; +use std::collections::HashMap; use std::fmt::Debug; -use std::pin::Pin; +use std::future::Future; use async_graphql_value::ConstValue; pub use cache::*; @@ -34,6 +34,7 @@ pub enum IR { Cache(Cache), Path(Box, Vec), Protect(Box), + Map(Map), } #[derive(Clone, Debug)] @@ -54,12 +55,40 @@ impl IR { } } +#[derive(Clone, Debug)] +pub struct Map { + pub input: Box, + // accept key return value instead of + pub map: HashMap, +} + +impl Eval for Map { + async fn eval(&self, ctx: &mut EvaluationContext<'_, Ctx>) -> Result + where + Ctx: ResolverContextLike + Sync, + { + let value = self.input.eval(ctx).await?; + if let ConstValue::String(key) = value { + if let Some(value) = self.map.get(&key) { + 
Ok(ConstValue::String(value.to_owned())) + } else { + Err(Error::Other(format!("Can't find mapped key: {}.", key))) + } + } else { + Err(Error::Other("Mapped key must be string value.".to_owned())) + } + } +} + impl Eval for IR { #[tracing::instrument(skip_all, fields(otel.name = %self))] - fn eval<'a, Ctx: ResolverContextLike<'a> + Sync + Send>( - &'a self, - ctx: EvaluationContext<'a, Ctx>, - ) -> Pin> + 'a + Send>> { + fn eval( + &self, + ctx: &mut EvaluationContext<'_, Ctx>, + ) -> impl Future> + where + Ctx: ResolverContextLike + Sync, + { Box::pin(async move { match self { IR::Context(op) => match op { @@ -71,13 +100,13 @@ impl Eval for IR { .map(|a| a.into_owned()) .unwrap_or(async_graphql::Value::Null)), Context::PushArgs { expr, and_then } => { - let args = expr.eval(ctx.clone()).await?; - let ctx = ctx.with_args(args).clone(); + let args = expr.eval(ctx).await?; + let ctx = &mut ctx.with_args(args); and_then.eval(ctx).await } Context::PushValue { expr, and_then } => { - let value = expr.eval(ctx.clone()).await?; - let ctx = ctx.with_value(value); + let value = expr.eval(ctx).await?; + let ctx = &mut ctx.with_value(value); and_then.eval(ctx).await } }, @@ -88,7 +117,7 @@ impl Eval for IR { .unwrap_or(&async_graphql::Value::Null) .clone()) } - IR::Dynamic(value) => Ok(value.render_value(&ctx)), + IR::Dynamic(value) => Ok(value.render_value(ctx)), IR::Protect(expr) => { ctx.request_ctx .auth_ctx @@ -99,6 +128,7 @@ impl Eval for IR { } IR::IO(operation) => operation.eval(ctx).await, IR::Cache(cached) => cached.eval(ctx).await, + IR::Map(map) => map.eval(ctx).await, } }) } diff --git a/src/core/ir/modify.rs b/src/core/ir/modify.rs index dadead02fc..6e1137f7fc 100644 --- a/src/core/ir/modify.rs +++ b/src/core/ir/modify.rs @@ -1,4 +1,4 @@ -use super::{Cache, IR}; +use super::{Cache, Map, IR}; impl IR { pub fn modify(self, mut f: impl FnMut(&IR) -> Option) -> IR { @@ -38,6 +38,9 @@ impl IR { } IR::Path(expr, path) => IR::Path(expr.modify_box(modifier), path), 
IR::Protect(expr) => IR::Protect(expr.modify_box(modifier)), + IR::Map(Map { input, map }) => { + IR::Map(Map { input: input.modify_box(modifier), map }) + } } } } diff --git a/src/core/ir/resolver_context_like.rs b/src/core/ir/resolver_context_like.rs index 57cdafe707..92333a92d9 100644 --- a/src/core/ir/resolver_context_like.rs +++ b/src/core/ir/resolver_context_like.rs @@ -1,33 +1,39 @@ use std::sync::Arc; use async_graphql::context::SelectionField; +use async_graphql::parser::types::OperationType; use async_graphql::{Name, ServerError, Value}; use indexmap::IndexMap; -pub trait ResolverContextLike<'a>: Clone { - fn value(&'a self) -> Option<&'a Value>; - fn args(&'a self) -> Option<&'a IndexMap>; - fn field(&'a self) -> Option; - fn add_error(&'a self, error: ServerError); +pub trait ResolverContextLike: Clone { + fn value(&self) -> Option<&Value>; + fn args(&self) -> Option<&IndexMap>; + fn field(&self) -> Option; + fn is_query(&self) -> bool; + fn add_error(&self, error: ServerError); } #[derive(Clone)] pub struct EmptyResolverContext; -impl<'a> ResolverContextLike<'a> for EmptyResolverContext { - fn value(&'a self) -> Option<&'a Value> { +impl ResolverContextLike for EmptyResolverContext { + fn value(&self) -> Option<&Value> { None } - fn args(&'a self) -> Option<&'a IndexMap> { + fn args(&self) -> Option<&IndexMap> { None } - fn field(&'a self) -> Option { + fn field(&self) -> Option { None } - fn add_error(&'a self, _: ServerError) {} + fn is_query(&self) -> bool { + false + } + + fn add_error(&self, _: ServerError) {} } #[derive(Clone)] @@ -41,20 +47,24 @@ impl<'a> From> for ResolverContext<' } } -impl<'a> ResolverContextLike<'a> for ResolverContext<'a> { - fn value(&'a self) -> Option<&'a Value> { +impl<'a> ResolverContextLike for ResolverContext<'a> { + fn value(&self) -> Option<&Value> { self.inner.parent_value.as_value() } - fn args(&'a self) -> Option<&'a IndexMap> { + fn args(&self) -> Option<&IndexMap> { Some(self.inner.args.as_index_map()) } - fn 
field(&'a self) -> Option { + fn field(&self) -> Option { Some(self.inner.ctx.field()) } - fn add_error(&'a self, error: ServerError) { + fn is_query(&self) -> bool { + self.inner.ctx.query_env.operation.node.ty == OperationType::Query + } + + fn add_error(&self, error: ServerError) { self.inner.ctx.add_error(error) } } diff --git a/src/core/mod.rs b/src/core/mod.rs index 82531477de..73a3f0c980 100644 --- a/src/core/mod.rs +++ b/src/core/mod.rs @@ -2,7 +2,6 @@ #![allow(clippy::mutable_key_type)] mod app_context; -pub mod async_cache; pub mod async_graphql_hyper; mod auth; pub mod blueprint; diff --git a/src/core/path.rs b/src/core/path.rs index 849594341e..ac40ffef32 100644 --- a/src/core/path.rs +++ b/src/core/path.rs @@ -49,7 +49,7 @@ fn convert_value(value: Cow<'_, async_graphql::Value>) -> Option> { } } -impl<'a, Ctx: ResolverContextLike<'a>> PathString for EvaluationContext<'a, Ctx> { +impl<'a, Ctx: ResolverContextLike> PathString for EvaluationContext<'a, Ctx> { fn path_string>(&self, path: &[T]) -> Option> { let ctx = self; @@ -78,7 +78,7 @@ impl<'a, Ctx: ResolverContextLike<'a>> PathString for EvaluationContext<'a, Ctx> } } -impl<'a, Ctx: ResolverContextLike<'a>> PathGraphql for EvaluationContext<'a, Ctx> { +impl<'a, Ctx: ResolverContextLike> PathGraphql for EvaluationContext<'a, Ctx> { fn path_graphql>(&self, path: &[T]) -> Option { let ctx = self; @@ -192,20 +192,24 @@ mod tests { #[derive(Clone)] struct MockGraphqlContext; - impl<'a> ResolverContextLike<'a> for MockGraphqlContext { - fn value(&'a self) -> Option<&'a Value> { + impl ResolverContextLike for MockGraphqlContext { + fn value(&self) -> Option<&Value> { Some(&TEST_VALUES) } - fn args(&'a self) -> Option<&'a IndexMap> { + fn args(&self) -> Option<&IndexMap> { Some(&TEST_ARGS) } - fn field(&'a self) -> Option { + fn field(&self) -> Option { None } - fn add_error(&'a self, _: async_graphql::ServerError) {} + fn is_query(&self) -> bool { + false + } + + fn add_error(&self, _: 
async_graphql::ServerError) {} } static REQ_CTX: Lazy = Lazy::new(|| { diff --git a/src/core/rest/endpoint.rs b/src/core/rest/endpoint.rs index f89b416bb0..2131a1f15c 100644 --- a/src/core/rest/endpoint.rs +++ b/src/core/rest/endpoint.rs @@ -157,7 +157,7 @@ impl Endpoint { // Query let query = self.query_params.matches(query_params)?; - // FIXME: Too much cloning is happening via merge_variables + // TODO: Too much cloning is happening via merge_variables variables = merge_variables(variables, path); variables = merge_variables(variables, query); diff --git a/tailcall-cloudflare/package-lock.json b/tailcall-cloudflare/package-lock.json index 37d1ce760d..b1875cd304 100644 --- a/tailcall-cloudflare/package-lock.json +++ b/tailcall-cloudflare/package-lock.json @@ -27,9 +27,9 @@ } }, "node_modules/@cloudflare/workerd-darwin-64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20240605.0.tgz", - "integrity": "sha512-6V4Uze6jEM1mPBdPO6AevPwAOG2s+auEG1vPzZilwbrpn3BbYklEpQqcAZj05uUXaM6rnffnXerW8X8Fc8l4qQ==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20240610.1.tgz", + "integrity": "sha512-YanZ1iXgMGaUWlleB5cswSE6qbzyjQ8O7ENWZcPAcZZ6BfuL7q3CWi0t9iM1cv2qx92rRztsRTyjcfq099++XQ==", "cpu": [ "x64" ], @@ -43,9 +43,9 @@ } }, "node_modules/@cloudflare/workerd-darwin-arm64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20240605.0.tgz", - "integrity": "sha512-ZNxjVSeMYUhTfVlrMsVjpN5eHA2kq3+S7ZMsGu5l44ZqFrDygsFDoc9C4anJVUEIHGFUB9LMu4ZTdS5S80hvPQ==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20240610.1.tgz", + "integrity": "sha512-bRe/y/LKjIgp3L2EHjc+CvoCzfHhf4aFTtOBkv2zW+VToNJ4KlXridndf7LvR9urfsFRRo9r4TXCssuKaU+ypQ==", "cpu": [ "arm64" ], @@ -59,9 +59,9 @@ } 
}, "node_modules/@cloudflare/workerd-linux-64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20240605.0.tgz", - "integrity": "sha512-zqOWDrYEudW5JCcU8lxCFQ96UHJJHrM+uvGaRS4u5nJaEgMr2z7u9I2286+l1R3JWvJdqj9ehGuHQvZkaTADxw==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20240610.1.tgz", + "integrity": "sha512-2zDcadR7+Gs9SjcMXmwsMji2Xs+yASGNA2cEHDuFc4NMUup+eL1mkzxc/QzvFjyBck98e92rBjMZt2dVscpGKg==", "cpu": [ "x64" ], @@ -75,9 +75,9 @@ } }, "node_modules/@cloudflare/workerd-linux-arm64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20240605.0.tgz", - "integrity": "sha512-qFTVNem7bMsU9P1dXUi+kb8EdU5aag1I9RQq6ZLS/zfiJ0a/UasihwQG8lrzT7k9x80VnpyCekNmd625qsVZjQ==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20240610.1.tgz", + "integrity": "sha512-7y41rPi5xmIYJN8CY+t3RHnjLL0xx/WYmaTd/j552k1qSr02eTE2o/TGyWZmGUC+lWnwdPQJla0mXbvdqgRdQg==", "cpu": [ "arm64" ], @@ -91,9 +91,9 @@ } }, "node_modules/@cloudflare/workerd-windows-64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20240605.0.tgz", - "integrity": "sha512-s0U7d52ALQtb0enbHJ/AXmy+pyBQVoTIaAdAApy/PWrMiAnb8iJhf7A35pRTYfty5SUf7EX9BAPcKmeh+t3N5g==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20240610.1.tgz", + "integrity": "sha512-B0LyT3DB6rXHWNptnntYHPaoJIy0rXnGfeDBM3nEVV8JIsQrx8MEFn2F2jYioH1FkUVavsaqKO/zUosY3tZXVA==", "cpu": [ "x64" ], @@ -107,9 +107,9 @@ } }, "node_modules/@cloudflare/workers-types": { - "version": "4.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20240605.0.tgz", - "integrity": 
"sha512-zJw4Q6CnkaQ5JZmHRkNiSs5GfiRgUIUL8BIHPQkd2XUHZkIBv9M9yc0LKEwMYGpCFC+oSOltet6c9RjP9uQ99g==", + "version": "4.20240614.0", + "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20240614.0.tgz", + "integrity": "sha512-fnV3uXD1Hpq5EWnY7XYb+smPcjzIoUFiZpTSV/Tk8qKL3H+w6IqcngZwXQBZ/2U/DwYkDilXHW3FfPhnyD7FZA==", "dev": true }, "node_modules/@cspotcode/source-map-support": { @@ -1454,9 +1454,9 @@ } }, "node_modules/miniflare": { - "version": "3.20240605.0", - "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-3.20240605.0.tgz", - "integrity": "sha512-wE59RULU5zo6eYhL8j3wYdoOQ5istlkMruEr5pYvykL0LJecjKlFc8/cu4WJ5JdkdhutE1320Awi7WEICcolEw==", + "version": "3.20240610.0", + "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-3.20240610.0.tgz", + "integrity": "sha512-J6aXmkII5gcq+kC4TurxKiR4rC++apPST/K8P/YjqoQQgrJ+NRPacBhf6iVh8R3ujnXYXaq+Ae+gm+LM0XHK/w==", "dev": true, "dependencies": { "@cspotcode/source-map-support": "0.8.1", @@ -1467,7 +1467,7 @@ "glob-to-regexp": "^0.4.1", "stoppable": "^1.1.0", "undici": "^5.28.2", - "workerd": "1.20240605.0", + "workerd": "1.20240610.1", "ws": "^8.11.0", "youch": "^3.2.2", "zod": "^3.20.6" @@ -2256,9 +2256,9 @@ } }, "node_modules/workerd": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20240605.0.tgz", - "integrity": "sha512-2yhzgaprAOFm7H988xlRFmU4rOLXhSsq24wh6ayucMB3ORfe/nYJ2ysFn1mzjB+UxEJVt5PhixgHkZLv1S8UPQ==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20240610.1.tgz", + "integrity": "sha512-Rtut5GrsODQMh6YU43b9WZ980Wd05Ov1/ds88pT/SoetmXFBvkBzdRfiHiATv+azmGX8KveE0i/Eqzk/yI01ug==", "dev": true, "hasInstallScript": true, "bin": { @@ -2268,17 +2268,17 @@ "node": ">=16" }, "optionalDependencies": { - "@cloudflare/workerd-darwin-64": "1.20240605.0", - "@cloudflare/workerd-darwin-arm64": "1.20240605.0", - "@cloudflare/workerd-linux-64": "1.20240605.0", - "@cloudflare/workerd-linux-arm64": 
"1.20240605.0", - "@cloudflare/workerd-windows-64": "1.20240605.0" + "@cloudflare/workerd-darwin-64": "1.20240610.1", + "@cloudflare/workerd-darwin-arm64": "1.20240610.1", + "@cloudflare/workerd-linux-64": "1.20240610.1", + "@cloudflare/workerd-linux-arm64": "1.20240610.1", + "@cloudflare/workerd-windows-64": "1.20240610.1" } }, "node_modules/wrangler": { - "version": "3.60.0", - "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-3.60.0.tgz", - "integrity": "sha512-8lW4E7VAhCVXy7AV7Vom2BsyjekIt15Aa8jnBLsfE2CxalzWQzx49BK25hOYhjHQT4GqBXeoNOZ43FrMQ+SUUA==", + "version": "3.60.3", + "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-3.60.3.tgz", + "integrity": "sha512-a6zn/KFnYaYp3nxJR/aP0TeaBvJDkrrfI89KoxUtx28H7zpya/5/VLu3CxQ3PRspEojJGF0s6f3/pddRy3F+BQ==", "dev": true, "dependencies": { "@cloudflare/kv-asset-handler": "0.3.2", @@ -2287,7 +2287,7 @@ "blake3-wasm": "^2.1.5", "chokidar": "^3.5.3", "esbuild": "0.17.19", - "miniflare": "3.20240605.0", + "miniflare": "3.20240610.0", "nanoid": "^3.3.3", "path-to-regexp": "^6.2.0", "resolve": "^1.22.8", @@ -2776,44 +2776,44 @@ } }, "@cloudflare/workerd-darwin-64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20240605.0.tgz", - "integrity": "sha512-6V4Uze6jEM1mPBdPO6AevPwAOG2s+auEG1vPzZilwbrpn3BbYklEpQqcAZj05uUXaM6rnffnXerW8X8Fc8l4qQ==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20240610.1.tgz", + "integrity": "sha512-YanZ1iXgMGaUWlleB5cswSE6qbzyjQ8O7ENWZcPAcZZ6BfuL7q3CWi0t9iM1cv2qx92rRztsRTyjcfq099++XQ==", "dev": true, "optional": true }, "@cloudflare/workerd-darwin-arm64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20240605.0.tgz", - "integrity": "sha512-ZNxjVSeMYUhTfVlrMsVjpN5eHA2kq3+S7ZMsGu5l44ZqFrDygsFDoc9C4anJVUEIHGFUB9LMu4ZTdS5S80hvPQ==", + "version": 
"1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20240610.1.tgz", + "integrity": "sha512-bRe/y/LKjIgp3L2EHjc+CvoCzfHhf4aFTtOBkv2zW+VToNJ4KlXridndf7LvR9urfsFRRo9r4TXCssuKaU+ypQ==", "dev": true, "optional": true }, "@cloudflare/workerd-linux-64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20240605.0.tgz", - "integrity": "sha512-zqOWDrYEudW5JCcU8lxCFQ96UHJJHrM+uvGaRS4u5nJaEgMr2z7u9I2286+l1R3JWvJdqj9ehGuHQvZkaTADxw==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20240610.1.tgz", + "integrity": "sha512-2zDcadR7+Gs9SjcMXmwsMji2Xs+yASGNA2cEHDuFc4NMUup+eL1mkzxc/QzvFjyBck98e92rBjMZt2dVscpGKg==", "dev": true, "optional": true }, "@cloudflare/workerd-linux-arm64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20240605.0.tgz", - "integrity": "sha512-qFTVNem7bMsU9P1dXUi+kb8EdU5aag1I9RQq6ZLS/zfiJ0a/UasihwQG8lrzT7k9x80VnpyCekNmd625qsVZjQ==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20240610.1.tgz", + "integrity": "sha512-7y41rPi5xmIYJN8CY+t3RHnjLL0xx/WYmaTd/j552k1qSr02eTE2o/TGyWZmGUC+lWnwdPQJla0mXbvdqgRdQg==", "dev": true, "optional": true }, "@cloudflare/workerd-windows-64": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20240605.0.tgz", - "integrity": "sha512-s0U7d52ALQtb0enbHJ/AXmy+pyBQVoTIaAdAApy/PWrMiAnb8iJhf7A35pRTYfty5SUf7EX9BAPcKmeh+t3N5g==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20240610.1.tgz", + "integrity": "sha512-B0LyT3DB6rXHWNptnntYHPaoJIy0rXnGfeDBM3nEVV8JIsQrx8MEFn2F2jYioH1FkUVavsaqKO/zUosY3tZXVA==", "dev": true, 
"optional": true }, "@cloudflare/workers-types": { - "version": "4.20240605.0", - "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20240605.0.tgz", - "integrity": "sha512-zJw4Q6CnkaQ5JZmHRkNiSs5GfiRgUIUL8BIHPQkd2XUHZkIBv9M9yc0LKEwMYGpCFC+oSOltet6c9RjP9uQ99g==", + "version": "4.20240614.0", + "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20240614.0.tgz", + "integrity": "sha512-fnV3uXD1Hpq5EWnY7XYb+smPcjzIoUFiZpTSV/Tk8qKL3H+w6IqcngZwXQBZ/2U/DwYkDilXHW3FfPhnyD7FZA==", "dev": true }, "@cspotcode/source-map-support": { @@ -3661,9 +3661,9 @@ "dev": true }, "miniflare": { - "version": "3.20240605.0", - "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-3.20240605.0.tgz", - "integrity": "sha512-wE59RULU5zo6eYhL8j3wYdoOQ5istlkMruEr5pYvykL0LJecjKlFc8/cu4WJ5JdkdhutE1320Awi7WEICcolEw==", + "version": "3.20240610.0", + "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-3.20240610.0.tgz", + "integrity": "sha512-J6aXmkII5gcq+kC4TurxKiR4rC++apPST/K8P/YjqoQQgrJ+NRPacBhf6iVh8R3ujnXYXaq+Ae+gm+LM0XHK/w==", "dev": true, "requires": { "@cspotcode/source-map-support": "0.8.1", @@ -3674,7 +3674,7 @@ "glob-to-regexp": "^0.4.1", "stoppable": "^1.1.0", "undici": "^5.28.2", - "workerd": "1.20240605.0", + "workerd": "1.20240610.1", "ws": "^8.11.0", "youch": "^3.2.2", "zod": "^3.20.6" @@ -4206,22 +4206,22 @@ } }, "workerd": { - "version": "1.20240605.0", - "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20240605.0.tgz", - "integrity": "sha512-2yhzgaprAOFm7H988xlRFmU4rOLXhSsq24wh6ayucMB3ORfe/nYJ2ysFn1mzjB+UxEJVt5PhixgHkZLv1S8UPQ==", + "version": "1.20240610.1", + "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20240610.1.tgz", + "integrity": "sha512-Rtut5GrsODQMh6YU43b9WZ980Wd05Ov1/ds88pT/SoetmXFBvkBzdRfiHiATv+azmGX8KveE0i/Eqzk/yI01ug==", "dev": true, "requires": { - "@cloudflare/workerd-darwin-64": "1.20240605.0", - "@cloudflare/workerd-darwin-arm64": "1.20240605.0", - 
"@cloudflare/workerd-linux-64": "1.20240605.0", - "@cloudflare/workerd-linux-arm64": "1.20240605.0", - "@cloudflare/workerd-windows-64": "1.20240605.0" + "@cloudflare/workerd-darwin-64": "1.20240610.1", + "@cloudflare/workerd-darwin-arm64": "1.20240610.1", + "@cloudflare/workerd-linux-64": "1.20240610.1", + "@cloudflare/workerd-linux-arm64": "1.20240610.1", + "@cloudflare/workerd-windows-64": "1.20240610.1" } }, "wrangler": { - "version": "3.60.0", - "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-3.60.0.tgz", - "integrity": "sha512-8lW4E7VAhCVXy7AV7Vom2BsyjekIt15Aa8jnBLsfE2CxalzWQzx49BK25hOYhjHQT4GqBXeoNOZ43FrMQ+SUUA==", + "version": "3.60.3", + "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-3.60.3.tgz", + "integrity": "sha512-a6zn/KFnYaYp3nxJR/aP0TeaBvJDkrrfI89KoxUtx28H7zpya/5/VLu3CxQ3PRspEojJGF0s6f3/pddRy3F+BQ==", "dev": true, "requires": { "@cloudflare/kv-asset-handler": "0.3.2", @@ -4231,7 +4231,7 @@ "chokidar": "^3.5.3", "esbuild": "0.17.19", "fsevents": "~2.3.2", - "miniflare": "3.20240605.0", + "miniflare": "3.20240610.0", "nanoid": "^3.3.3", "path-to-regexp": "^6.2.0", "resolve": "^1.22.8", diff --git a/tailcall-fixtures/fixtures/configs/name-generation.graphql b/tailcall-fixtures/fixtures/configs/name-generation.graphql new file mode 100644 index 0000000000..3eb77f3468 --- /dev/null +++ b/tailcall-fixtures/fixtures/configs/name-generation.graphql @@ -0,0 +1,21 @@ +schema @server(port: 8000, hostname: "0.0.0.0") @upstream(baseURL: "http://example.typicode.com", httpCache: 42) { + query: Query +} +type T3 { + name: String + hexCode: String +} + +type T2 { + colors: [T3] + isColorPageExists: Boolean + isColorsImageAvailable: Boolean +} + +type T1 { + color: T2 +} + +type Query { + f1: T1 @http(path: "/colors") +} diff --git a/tailcall-query-plan/src/execution/executor.rs b/tailcall-query-plan/src/execution/executor.rs index 05601aa60b..c952742226 100644 --- a/tailcall-query-plan/src/execution/executor.rs +++ 
b/tailcall-query-plan/src/execution/executor.rs @@ -134,20 +134,24 @@ struct GraphqlContext<'a> { value: Option<&'a Value>, } -impl<'a> ResolverContextLike<'a> for GraphqlContext<'a> { - fn value(&'a self) -> Option<&'a Value> { +impl<'a> ResolverContextLike for GraphqlContext<'a> { + fn value(&self) -> Option<&Value> { self.value } - fn args(&'a self) -> Option<&'a IndexMap> { + fn args(&self) -> Option<&IndexMap> { self.arguments } - fn field(&'a self) -> Option { + fn field(&self) -> Option { None } - fn add_error(&'a self, _error: async_graphql::ServerError) { + fn is_query(&self) -> bool { + false + } + + fn add_error(&self, _error: async_graphql::ServerError) { // TODO: add implementation } } diff --git a/tailcall-query-plan/src/resolver.rs b/tailcall-query-plan/src/resolver.rs index 9d7e2f471a..d1cfd84ef6 100644 --- a/tailcall-query-plan/src/resolver.rs +++ b/tailcall-query-plan/src/resolver.rs @@ -55,11 +55,11 @@ impl Display for FieldPlan { } impl FieldPlan { - pub async fn eval<'a, Ctx: ResolverContextLike<'a> + Sync + Send>( - &'a self, - ctx: EvaluationContext<'a, Ctx>, + pub async fn eval( + &self, + mut ctx: EvaluationContext<'_, Ctx>, ) -> Result { - Ok(self.resolver.eval(ctx).await?) + Ok(self.resolver.eval(&mut ctx).await?) } } diff --git a/tailcall-typedefs/src/gen_gql_schema.rs b/tailcall-typedefs/src/gen_gql_schema.rs index 6b00774e6c..c3925c0139 100644 --- a/tailcall-typedefs/src/gen_gql_schema.rs +++ b/tailcall-typedefs/src/gen_gql_schema.rs @@ -40,6 +40,7 @@ lazy_static! 
{ ), ("js", vec![Entity::FieldDefinition], false), ("tag", vec![Entity::Object], false), + ("alias", vec![Entity::EnumValueDefinition], false), ]; } @@ -77,6 +78,7 @@ enum Entity { Schema, Object, FieldDefinition, + EnumValueDefinition, } trait ToGraphql { @@ -95,6 +97,9 @@ impl ToGraphql for Entity { Entity::FieldDefinition => { write!(f, "FIELD_DEFINITION") } + Entity::EnumValueDefinition => { + write!(f, "ENUM_VALUE_DEFINITION") + } } } } diff --git a/tests/core/snapshots/async-cache-enable-multiple-resolvers.md_merged.snap b/tests/core/snapshots/async-cache-enable-multiple-resolvers.md_merged.snap index 71c14d8ddf..fc93f85386 100644 --- a/tests/core/snapshots/async-cache-enable-multiple-resolvers.md_merged.snap +++ b/tests/core/snapshots/async-cache-enable-multiple-resolvers.md_merged.snap @@ -3,8 +3,8 @@ source: tests/core/spec.rs expression: formatter --- schema - @server(port: 8000, queryValidation: false) - @upstream(baseURL: "http://jsonplaceholder.typicode.com", dedupe: true) { + @server(dedupe: true, port: 8000, queryValidation: false) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { query: Query } diff --git a/tests/core/snapshots/async-cache-enabled.md_merged.snap b/tests/core/snapshots/async-cache-enabled.md_merged.snap index 6122c63f2b..5c670d71b6 100644 --- a/tests/core/snapshots/async-cache-enabled.md_merged.snap +++ b/tests/core/snapshots/async-cache-enabled.md_merged.snap @@ -3,8 +3,8 @@ source: tests/core/spec.rs expression: formatter --- schema - @server(port: 8000, queryValidation: false) - @upstream(baseURL: "http://jsonplaceholder.typicode.com", dedupe: true) { + @server(dedupe: true, port: 8000, queryValidation: false) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { query: Query } diff --git a/tests/core/snapshots/async-cache-global.md_0.snap b/tests/core/snapshots/async-cache-global.md_0.snap new file mode 100644 index 0000000000..62ed8b07b6 --- /dev/null +++ b/tests/core/snapshots/async-cache-global.md_0.snap @@ 
-0,0 +1,24 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "posts": [ + { + "id": 1, + "userId": 1 + }, + { + "id": 2, + "userId": 2 + } + ] + } + } +} diff --git a/tests/core/snapshots/async-cache-global.md_client.snap b/tests/core/snapshots/async-cache-global.md_client.snap new file mode 100644 index 0000000000..531e10f76b --- /dev/null +++ b/tests/core/snapshots/async-cache-global.md_client.snap @@ -0,0 +1,52 @@ +--- +source: tests/core/spec.rs +expression: formatted +--- +scalar Bytes + +scalar Date + +scalar Email + +scalar Empty + +scalar Int128 + +scalar Int16 + +scalar Int32 + +scalar Int64 + +scalar Int8 + +scalar JSON + +scalar PhoneNumber + +type Post { + body: String + id: Int + title: String + userId: Int! +} + +type Query { + posts: [Post] +} + +scalar UInt128 + +scalar UInt16 + +scalar UInt32 + +scalar UInt64 + +scalar UInt8 + +scalar Url + +schema { + query: Query +} diff --git a/tests/core/snapshots/async-cache-global.md_merged.snap b/tests/core/snapshots/async-cache-global.md_merged.snap new file mode 100644 index 0000000000..cbf1b19790 --- /dev/null +++ b/tests/core/snapshots/async-cache-global.md_merged.snap @@ -0,0 +1,25 @@ +--- +source: tests/core/spec.rs +expression: formatter +--- +schema + @server(dedupe: true, port: 8000, queryValidation: false) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { + query: Query +} + +type Post { + body: String + id: Int + title: String + userId: Int! 
+} + +type Query { + posts: [Post] @http(path: "/posts?id=1") +} + +type User { + id: Int + name: String +} diff --git a/tests/core/snapshots/async-cache-inflight-request.md_0.snap b/tests/core/snapshots/async-cache-inflight-request.md_0.snap new file mode 100644 index 0000000000..b2709ed123 --- /dev/null +++ b/tests/core/snapshots/async-cache-inflight-request.md_0.snap @@ -0,0 +1,26 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "posts": [ + { + "user": { + "name": "Leanne Graham" + } + }, + { + "user": { + "name": "Leanne Graham" + } + } + ] + } + } +} diff --git a/tests/core/snapshots/async-cache-inflight-request.md_client.snap b/tests/core/snapshots/async-cache-inflight-request.md_client.snap new file mode 100644 index 0000000000..16a291c915 --- /dev/null +++ b/tests/core/snapshots/async-cache-inflight-request.md_client.snap @@ -0,0 +1,58 @@ +--- +source: tests/core/spec.rs +expression: formatted +--- +scalar Bytes + +scalar Date + +scalar Email + +scalar Empty + +scalar Int128 + +scalar Int16 + +scalar Int32 + +scalar Int64 + +scalar Int8 + +scalar JSON + +scalar PhoneNumber + +type Post { + body: String + id: Int + title: String + user: User + userId: Int! 
+} + +type Query { + posts: [Post] +} + +scalar UInt128 + +scalar UInt16 + +scalar UInt32 + +scalar UInt64 + +scalar UInt8 + +scalar Url + +type User { + id: Int + name: String +} + +schema { + query: Query +} diff --git a/tests/core/snapshots/async-cache-inflight-request.md_merged.snap b/tests/core/snapshots/async-cache-inflight-request.md_merged.snap new file mode 100644 index 0000000000..5c670d71b6 --- /dev/null +++ b/tests/core/snapshots/async-cache-inflight-request.md_merged.snap @@ -0,0 +1,26 @@ +--- +source: tests/core/spec.rs +expression: formatter +--- +schema + @server(dedupe: true, port: 8000, queryValidation: false) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { + query: Query +} + +type Post { + body: String + id: Int + title: String + user: User @http(path: "/users/{{.value.userId}}") + userId: Int! +} + +type Query { + posts: [Post] @http(path: "/posts?id=1") +} + +type User { + id: Int + name: String +} diff --git a/tests/core/snapshots/dedupe_batch_query_execution.md_0.snap b/tests/core/snapshots/dedupe_batch_query_execution.md_0.snap new file mode 100644 index 0000000000..62ed8b07b6 --- /dev/null +++ b/tests/core/snapshots/dedupe_batch_query_execution.md_0.snap @@ -0,0 +1,24 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "posts": [ + { + "id": 1, + "userId": 1 + }, + { + "id": 2, + "userId": 2 + } + ] + } + } +} diff --git a/tests/core/snapshots/dedupe_batch_query_execution.md_client.snap b/tests/core/snapshots/dedupe_batch_query_execution.md_client.snap new file mode 100644 index 0000000000..531e10f76b --- /dev/null +++ b/tests/core/snapshots/dedupe_batch_query_execution.md_client.snap @@ -0,0 +1,52 @@ +--- +source: tests/core/spec.rs +expression: formatted +--- +scalar Bytes + +scalar Date + +scalar Email + +scalar Empty + +scalar Int128 + +scalar Int16 + +scalar Int32 + +scalar Int64 + +scalar Int8 + +scalar JSON 
+ +scalar PhoneNumber + +type Post { + body: String + id: Int + title: String + userId: Int! +} + +type Query { + posts: [Post] +} + +scalar UInt128 + +scalar UInt16 + +scalar UInt32 + +scalar UInt64 + +scalar UInt8 + +scalar Url + +schema { + query: Query +} diff --git a/tests/core/snapshots/dedupe_batch_query_execution.md_merged.snap b/tests/core/snapshots/dedupe_batch_query_execution.md_merged.snap new file mode 100644 index 0000000000..cbf1b19790 --- /dev/null +++ b/tests/core/snapshots/dedupe_batch_query_execution.md_merged.snap @@ -0,0 +1,25 @@ +--- +source: tests/core/spec.rs +expression: formatter +--- +schema + @server(dedupe: true, port: 8000, queryValidation: false) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { + query: Query +} + +type Post { + body: String + id: Int + title: String + userId: Int! +} + +type Query { + posts: [Post] @http(path: "/posts?id=1") +} + +type User { + id: Int + name: String +} diff --git a/tests/core/snapshots/default-value-arg.md_0.snap b/tests/core/snapshots/default-value-arg.md_0.snap new file mode 100644 index 0000000000..c3d9995606 --- /dev/null +++ b/tests/core/snapshots/default-value-arg.md_0.snap @@ -0,0 +1,15 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "bar": 1 + } + } +} diff --git a/tests/core/snapshots/default-value-arg.md_1.snap b/tests/core/snapshots/default-value-arg.md_1.snap new file mode 100644 index 0000000000..7fb28bfef9 --- /dev/null +++ b/tests/core/snapshots/default-value-arg.md_1.snap @@ -0,0 +1,15 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "bar": 2 + } + } +} diff --git a/tests/core/snapshots/default-value-arg.md_client.snap b/tests/core/snapshots/default-value-arg.md_client.snap new file mode 100644 index 0000000000..913b7b4b0f --- /dev/null +++ 
b/tests/core/snapshots/default-value-arg.md_client.snap @@ -0,0 +1,49 @@ +--- +source: tests/core/spec.rs +expression: formatted +--- +scalar Bytes + +scalar Date + +scalar Email + +scalar Empty + +input Input { + id: Int! +} + +scalar Int128 + +scalar Int16 + +scalar Int32 + +scalar Int64 + +scalar Int8 + +scalar JSON + +scalar PhoneNumber + +type Query { + bar(input: Input = {id: 1}): Int +} + +scalar UInt128 + +scalar UInt16 + +scalar UInt32 + +scalar UInt64 + +scalar UInt8 + +scalar Url + +schema { + query: Query +} diff --git a/tests/core/snapshots/default-value-arg.md_merged.snap b/tests/core/snapshots/default-value-arg.md_merged.snap new file mode 100644 index 0000000000..027b09d6f7 --- /dev/null +++ b/tests/core/snapshots/default-value-arg.md_merged.snap @@ -0,0 +1,15 @@ +--- +source: tests/core/spec.rs +expression: formatter +--- +schema @server @upstream(baseURL: "http://abc.com") { + query: Query +} + +input Input { + id: Int! +} + +type Query { + bar(input: Input = {id: 1}): Int @http(path: "/bar/{{.args.input.id}}") +} diff --git a/tests/core/snapshots/default-value-config.md_client.snap b/tests/core/snapshots/default-value-config.md_client.snap new file mode 100644 index 0000000000..2db365a1f7 --- /dev/null +++ b/tests/core/snapshots/default-value-config.md_client.snap @@ -0,0 +1,50 @@ +--- +source: tests/core/spec.rs +expression: formatted +--- +scalar Bytes + +scalar Date + +scalar Email + +scalar Empty + +input Input { + id: Int = 1 +} + +scalar Int128 + +scalar Int16 + +scalar Int32 + +scalar Int64 + +scalar Int8 + +scalar JSON + +scalar PhoneNumber + +type Query { + bar(input: Input = {id: 3}): Int + foo(input: Input!): Int +} + +scalar UInt128 + +scalar UInt16 + +scalar UInt32 + +scalar UInt64 + +scalar UInt8 + +scalar Url + +schema { + query: Query +} diff --git a/tests/core/snapshots/default-value-config.md_merged.snap b/tests/core/snapshots/default-value-config.md_merged.snap new file mode 100644 index 0000000000..0016893a7c --- /dev/null +++ 
b/tests/core/snapshots/default-value-config.md_merged.snap @@ -0,0 +1,16 @@ +--- +source: tests/core/spec.rs +expression: formatter +--- +schema @server @upstream(baseURL: "http://abc.com") { + query: Query +} + +input Input { + id: Int = 1 +} + +type Query { + bar(input: Input = {id: 3}): Int @http(path: "/foo/{{.args.input.id}}") + foo(input: Input!): Int @http(path: "/foo/{{.args.input.id}}") +} diff --git a/tests/core/snapshots/recursive-type.md_client.snap b/tests/core/snapshots/recursive-type.md_client.snap new file mode 100644 index 0000000000..cce9f9cf66 --- /dev/null +++ b/tests/core/snapshots/recursive-type.md_client.snap @@ -0,0 +1,51 @@ +--- +source: tests/core/spec.rs +expression: formatted +--- +scalar Bytes + +type Color { + colors: [Color] + isColorPageExists: Boolean + isColorsImageAvailable: Boolean +} + +scalar Date + +scalar Email + +scalar Empty + +scalar Int128 + +scalar Int16 + +scalar Int32 + +scalar Int64 + +scalar Int8 + +scalar JSON + +scalar PhoneNumber + +type Query { + color: Color +} + +scalar UInt128 + +scalar UInt16 + +scalar UInt32 + +scalar UInt64 + +scalar UInt8 + +scalar Url + +schema { + query: Query +} diff --git a/tests/core/snapshots/recursive-type.md_merged.snap b/tests/core/snapshots/recursive-type.md_merged.snap new file mode 100644 index 0000000000..7140507092 --- /dev/null +++ b/tests/core/snapshots/recursive-type.md_merged.snap @@ -0,0 +1,17 @@ +--- +source: tests/core/spec.rs +expression: formatter +--- +schema @server @upstream { + query: Query +} + +type Color { + colors: [Color] + isColorPageExists: Boolean + isColorsImageAvailable: Boolean +} + +type Query { + color: Color @http(baseURL: "https://color.com", path: "/") +} diff --git a/tests/core/snapshots/test-enum-aliases.md_0.snap b/tests/core/snapshots/test-enum-aliases.md_0.snap new file mode 100644 index 0000000000..14e26a9d66 --- /dev/null +++ b/tests/core/snapshots/test-enum-aliases.md_0.snap @@ -0,0 +1,16 @@ +--- +source: tests/core/spec.rs 
+assertion_line: 200 +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "foo": "BAR" + } + } +} diff --git a/tests/core/snapshots/test-enum-aliases.md_client.snap b/tests/core/snapshots/test-enum-aliases.md_client.snap new file mode 100644 index 0000000000..79bb52466f --- /dev/null +++ b/tests/core/snapshots/test-enum-aliases.md_client.snap @@ -0,0 +1,51 @@ +--- +source: tests/core/spec.rs +assertion_line: 278 +expression: formatted +--- +scalar Bytes + +scalar Date + +scalar Email + +scalar Empty + +enum Foo { + BAR + BAZ +} + +scalar Int128 + +scalar Int16 + +scalar Int32 + +scalar Int64 + +scalar Int8 + +scalar JSON + +scalar PhoneNumber + +type Query { + foo: Foo +} + +scalar UInt128 + +scalar UInt16 + +scalar UInt32 + +scalar UInt64 + +scalar UInt8 + +scalar Url + +schema { + query: Query +} diff --git a/tests/core/snapshots/test-enum-aliases.md_merged.snap b/tests/core/snapshots/test-enum-aliases.md_merged.snap new file mode 100644 index 0000000000..8d8c47dade --- /dev/null +++ b/tests/core/snapshots/test-enum-aliases.md_merged.snap @@ -0,0 +1,17 @@ +--- +source: tests/core/spec.rs +assertion_line: 235 +expression: formatter +--- +schema @server @upstream(baseURL: "http://localhost:8080") { + query: Query +} + +enum Foo { + BAR @alias(options: ["OP1", "OP2"]) + BAZ +} + +type Query { + foo: Foo @expr(body: "OP1") +} diff --git a/tests/execution/async-cache-enable-multiple-resolvers.md b/tests/execution/async-cache-enable-multiple-resolvers.md index 7756b5b7f0..74e0bcc410 100644 --- a/tests/execution/async-cache-enable-multiple-resolvers.md +++ b/tests/execution/async-cache-enable-multiple-resolvers.md @@ -2,8 +2,8 @@ ```graphql @config schema - @server(port: 8000, queryValidation: false) - @upstream(baseURL: "http://jsonplaceholder.typicode.com", dedupe: true) { + @server(port: 8000, queryValidation: false, dedupe: true) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { 
query: Query } diff --git a/tests/execution/async-cache-enabled.md b/tests/execution/async-cache-enabled.md index 2f85e72a15..622b4334e3 100644 --- a/tests/execution/async-cache-enabled.md +++ b/tests/execution/async-cache-enabled.md @@ -2,8 +2,8 @@ ```graphql @config schema - @server(port: 8000, queryValidation: false) - @upstream(baseURL: "http://jsonplaceholder.typicode.com", dedupe: true) { + @server(port: 8000, queryValidation: false, dedupe: true) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { query: Query } diff --git a/tests/execution/async-cache-global.md b/tests/execution/async-cache-global.md new file mode 100644 index 0000000000..95d21ea045 --- /dev/null +++ b/tests/execution/async-cache-global.md @@ -0,0 +1,45 @@ +# Async Cache Inflight Enabled + +```graphql @config +schema + @server(port: 8000, queryValidation: false, dedupe: true) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { + query: Query +} + +type Query { + posts: [Post] @http(path: "/posts?id=1") +} + +type Post { + id: Int + title: String + body: String + userId: Int! 
+} + +type User { + id: Int + name: String +} +``` + +```yml @mock +- request: + method: GET + url: http://jsonplaceholder.typicode.com/posts?id=1 + response: + status: 200 + body: + - id: 1 + userId: 1 + - id: 2 + userId: 2 +``` + +```yml @test +- method: POST + url: http://localhost:8080/graphql + body: + query: query { posts { id, userId } } +``` diff --git a/tests/execution/async-cache-inflight-request.md b/tests/execution/async-cache-inflight-request.md new file mode 100644 index 0000000000..7b9aeb61f2 --- /dev/null +++ b/tests/execution/async-cache-inflight-request.md @@ -0,0 +1,55 @@ +# Async Cache Inflight and InRequest + +```graphql @config +schema + @server(port: 8000, queryValidation: false, dedupe: true) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { + query: Query +} + +type Query { + posts: [Post] @http(path: "/posts?id=1") +} + +type Post { + id: Int + title: String + body: String + userId: Int! + user: User @http(path: "/users/{{.value.userId}}") +} + +type User { + id: Int + name: String +} +``` + +```yml @mock +- request: + method: GET + url: http://jsonplaceholder.typicode.com/posts?id=1 + response: + status: 200 + body: + - id: 1 + userId: 1 + - id: 1 + userId: 1 +- request: + method: GET + url: http://jsonplaceholder.typicode.com/users/1 + expectedHits: 1 + response: + status: 200 + body: + id: 1 + name: Leanne Graham +``` + +```yml @test +- method: POST + url: http://localhost:8080/graphql + body: + query: query { posts { user { name } } } +``` diff --git a/tests/execution/dedupe_batch_query_execution.md b/tests/execution/dedupe_batch_query_execution.md new file mode 100644 index 0000000000..95d21ea045 --- /dev/null +++ b/tests/execution/dedupe_batch_query_execution.md @@ -0,0 +1,45 @@ +# Async Cache Inflight Enabled + +```graphql @config +schema + @server(port: 8000, queryValidation: false, dedupe: true) + @upstream(baseURL: "http://jsonplaceholder.typicode.com") { + query: Query +} + +type Query { + posts: [Post] @http(path: 
"/posts?id=1") +} + +type Post { + id: Int + title: String + body: String + userId: Int! +} + +type User { + id: Int + name: String +} +``` + +```yml @mock +- request: + method: GET + url: http://jsonplaceholder.typicode.com/posts?id=1 + response: + status: 200 + body: + - id: 1 + userId: 1 + - id: 2 + userId: 2 +``` + +```yml @test +- method: POST + url: http://localhost:8080/graphql + body: + query: query { posts { id, userId } } +``` diff --git a/tests/execution/default-value-arg.md b/tests/execution/default-value-arg.md new file mode 100644 index 0000000000..a1735e2929 --- /dev/null +++ b/tests/execution/default-value-arg.md @@ -0,0 +1,48 @@ +# default value for input Type + +```graphql @config +schema @upstream(baseURL: "http://abc.com") { + query: Query +} + +type Query { + bar(input: Input = {id: 1}): Int @http(path: "/bar/{{.args.input.id}}") +} + +input Input { + id: Int! +} +``` + +```yml @mock +- request: + method: GET + url: http://abc.com/bar/1 + response: + status: 200 + body: 1 + +- request: + method: GET + url: http://abc.com/bar/2 + response: + status: 200 + body: 2 +``` + +```yml @test +- method: POST + url: http://localhost:8080/graphql + body: + query: > + query { + bar + } +- method: POST + url: http://localhost:8080/graphql + body: + query: > + query { + bar(input: {id:2}) + } +``` diff --git a/tests/execution/default-value-config.md b/tests/execution/default-value-config.md new file mode 100644 index 0000000000..865a2a4ff7 --- /dev/null +++ b/tests/execution/default-value-config.md @@ -0,0 +1,16 @@ +# default value for input Type + +```graphql @config +schema @upstream(baseURL: "http://abc.com") { + query: Query +} + +type Query { + foo(input: Input!): Int @http(path: "/foo/{{.args.input.id}}") + bar(input: Input = {id: 3}): Int @http(path: "/foo/{{.args.input.id}}") +} + +input Input { + id: Int = 1 +} +``` diff --git a/tests/execution/recursive-type.md b/tests/execution/recursive-type.md new file mode 100644 index 0000000000..3cdc0e3dbc --- 
/dev/null +++ b/tests/execution/recursive-type.md @@ -0,0 +1,15 @@ +```graphql @config +schema { + query: Query +} + +type Query { + color: Color @http(baseURL: "https://color.com", path: "/") +} + +type Color { + colors: [Color] + isColorPageExists: Boolean + isColorsImageAvailable: Boolean +} +``` diff --git a/tests/execution/test-enum-aliases.md b/tests/execution/test-enum-aliases.md new file mode 100644 index 0000000000..2150205703 --- /dev/null +++ b/tests/execution/test-enum-aliases.md @@ -0,0 +1,27 @@ +--- +identity: true +--- + +# test-enum-aliases + +```graphql @config +schema @server @upstream(baseURL: "http://localhost:8080") { + query: Query +} + +enum Foo { + BAR @alias(options: ["OP1", "OP2"]) + BAZ +} + +type Query { + foo: Foo @expr(body: "OP1") +} +``` + +```yml @test +- method: POST + url: http://localhost:8080/graphql + body: + query: "query { foo }" +``` diff --git a/tests/expression_spec.rs b/tests/expression_spec.rs index 942cbd6919..4ab9945c31 100644 --- a/tests/expression_spec.rs +++ b/tests/expression_spec.rs @@ -12,8 +12,8 @@ mod tests { let runtime = tailcall::cli::runtime::init(&Blueprint::default()); let req_ctx = RequestContext::new(runtime); let res_ctx = EmptyResolverContext {}; - let eval_ctx = EvaluationContext::new(&req_ctx, &res_ctx); - expr.eval(eval_ctx).await + let mut eval_ctx = EvaluationContext::new(&req_ctx, &res_ctx); + expr.eval(&mut eval_ctx).await } #[tokio::test]