diff --git a/Cargo.lock b/Cargo.lock
index b1a3482b1f..21d0ed4fab 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -643,7 +643,7 @@ dependencies = [
"bitflags 2.6.0",
"cexpr",
"clang-sys",
- "itertools 0.10.5",
+ "itertools 0.12.1",
"lazy_static",
"lazycell",
"log",
@@ -1017,12 +1017,6 @@ dependencies = [
"wasm-bindgen",
]
-[[package]]
-name = "convert_case"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
-
[[package]]
name = "convert_case"
version = "0.6.0"
@@ -1359,19 +1353,6 @@ dependencies = [
"syn 1.0.109",
]
-[[package]]
-name = "derive_more"
-version = "0.99.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce"
-dependencies = [
- "convert_case 0.4.0",
- "proc-macro2",
- "quote",
- "rustc_version",
- "syn 2.0.86",
-]
-
[[package]]
name = "derive_more"
version = "1.0.0"
@@ -1646,9 +1627,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
-version = "1.0.34"
+version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0"
+checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c"
dependencies = [
"crc32fast",
"miniz_oxide",
@@ -1822,7 +1803,7 @@ version = "0.1.7-wip"
source = "git+https://github.com/laststylebender14/rust-genai.git?rev=63a542ce20132503c520f4e07108e0d768f243c3#63a542ce20132503c520f4e07108e0d768f243c3"
dependencies = [
"bytes",
- "derive_more 1.0.0",
+ "derive_more",
"eventsource-stream",
"futures",
"reqwest 0.12.7",
@@ -2454,7 +2435,7 @@ dependencies = [
"http 1.1.0",
"hyper 1.4.1",
"hyper-util",
- "rustls 0.23.16",
+ "rustls 0.23.18",
"rustls-pki-types",
"tokio",
"tokio-rustls 0.26.0",
@@ -2868,7 +2849,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
- "windows-targets 0.48.5",
+ "windows-targets 0.52.6",
]
[[package]]
@@ -4314,7 +4295,7 @@ dependencies = [
"quinn-proto",
"quinn-udp",
"rustc-hash 2.0.0",
- "rustls 0.23.16",
+ "rustls 0.23.18",
"socket2",
"thiserror",
"tokio",
@@ -4331,7 +4312,7 @@ dependencies = [
"rand",
"ring",
"rustc-hash 2.0.0",
- "rustls 0.23.16",
+ "rustls 0.23.18",
"slab",
"thiserror",
"tinyvec",
@@ -4578,7 +4559,7 @@ dependencies = [
"percent-encoding",
"pin-project-lite",
"quinn",
- "rustls 0.23.16",
+ "rustls 0.23.18",
"rustls-pemfile 2.1.3",
"rustls-pki-types",
"serde",
@@ -4682,7 +4663,7 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c602d3f35d1a5725235ef874b9a9e24232534e34fe610d53657e24c6c37572d"
dependencies = [
- "convert_case 0.6.0",
+ "convert_case",
"fnv",
"ident_case",
"indexmap 2.6.0",
@@ -4771,9 +4752,9 @@ dependencies = [
[[package]]
name = "rustls"
-version = "0.23.16"
+version = "0.23.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e"
+checksum = "9c9cc1d47e243d655ace55ed38201c19ae02c148ae56412ab8750e8f0166ab7f"
dependencies = [
"once_cell",
"ring",
@@ -5461,13 +5442,13 @@ dependencies = [
"chrono",
"clap",
"colored",
- "convert_case 0.6.0",
+ "convert_case",
"criterion",
"ctrlc",
"dashmap",
"datatest-stable",
"derive-getters",
- "derive_more 0.99.18",
+ "derive_more",
"derive_setters",
"dotenvy",
"exitcode",
@@ -5527,7 +5508,7 @@ dependencies = [
"reqwest-middleware",
"resource",
"rquickjs",
- "rustls 0.23.16",
+ "rustls 0.23.18",
"rustls-pemfile 1.0.4",
"rustls-pki-types",
"schemars",
@@ -5589,9 +5570,9 @@ dependencies = [
[[package]]
name = "tailcall-chunk"
-version = "0.2.5"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d684c265789778d9b76a268b079bca63dd8801d695e3c1d7f41df78e7a7adb4f"
+checksum = "5d244d8876e9677c9699d5254b72366c9249760d73a8b7295d1fb3eb6333f682"
[[package]]
name = "tailcall-cloudflare"
@@ -5620,8 +5601,8 @@ dependencies = [
name = "tailcall-fixtures"
version = "0.1.0"
dependencies = [
- "convert_case 0.6.0",
- "derive_more 0.99.18",
+ "convert_case",
+ "derive_more",
"indenter",
]
@@ -5673,8 +5654,8 @@ version = "0.1.0"
dependencies = [
"async-trait",
"chrono",
- "convert_case 0.6.0",
- "derive_more 0.99.18",
+ "convert_case",
+ "derive_more",
"http 0.2.12",
"lazy_static",
"machineid-rs",
@@ -5721,7 +5702,7 @@ dependencies = [
name = "tailcall-upstream-grpc"
version = "0.1.0"
dependencies = [
- "derive_more 0.99.18",
+ "derive_more",
"headers",
"http 0.2.12",
"http-body-util",
@@ -6028,7 +6009,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4"
dependencies = [
- "rustls 0.23.16",
+ "rustls 0.23.18",
"rustls-pki-types",
"tokio",
]
diff --git a/Cargo.toml b/Cargo.toml
index 7303fb7ca8..f03aef860f 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -24,7 +24,7 @@ tracing = "0.1.40"
lazy_static = "1.4.0"
serde_json = { version = "1.0.116", features = ["preserve_order"] }
serde = { version = "1.0.200", features = ["derive"] }
-derive_more = "0.99.18"
+derive_more = { version = "1", features = ["from", "debug"] }
thiserror = "1.0.59"
url = { version = "2.5.0", features = ["serde"] }
convert_case = "0.6.0"
@@ -175,7 +175,7 @@ strum = "0.26.2"
tailcall-valid = { workspace = true }
dashmap = "6.1.0"
urlencoding = "2.1.3"
-tailcall-chunk = "0.2.5"
+tailcall-chunk = "0.3.0"
# to build rquickjs bindings on systems without builtin bindings
[target.'cfg(all(target_os = "windows", target_arch = "x86"))'.dependencies]
diff --git a/README.md b/README.md
index 145ffe1c0d..572d66b7b8 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-[![Tailcall Logo](https://raw.githubusercontent.com/tailcallhq/tailcall/main/assets/logo_main.svg)](https://tailcall.run)
+[![Tailcall Logo](https://raw.githubusercontent.com/tailcallhq/tailcall/refs/heads/main/assets/logo_light.svg)](https://tailcall.run)
Tailcall is an open-source solution for building [high-performance] GraphQL backends.
diff --git a/assets/logo_light.svg b/assets/logo_light.svg
new file mode 100644
index 0000000000..9f42b8ae70
--- /dev/null
+++ b/assets/logo_light.svg
@@ -0,0 +1,20 @@
+
diff --git a/generated/.tailcallrc.graphql b/generated/.tailcallrc.graphql
index b80a628218..a484e6864e 100644
--- a/generated/.tailcallrc.graphql
+++ b/generated/.tailcallrc.graphql
@@ -47,7 +47,7 @@ directive @call(
of the previous step is passed as input to the next step.
"""
steps: [Step]
-) on FIELD_DEFINITION | OBJECT
+) repeatable on FIELD_DEFINITION | OBJECT
"""
The `@expr` operators allows you to specify an expression that can evaluate to a
@@ -55,7 +55,7 @@ value. The expression can be a static value or built form a Mustache template. s
"""
directive @expr(
body: JSON
-) on FIELD_DEFINITION | OBJECT
+) repeatable on FIELD_DEFINITION | OBJECT
"""
The @graphQL operator allows to specify GraphQL API server request to fetch data
@@ -95,7 +95,7 @@ directive @graphQL(
This refers URL of the API.
"""
url: String!
-) on FIELD_DEFINITION | OBJECT
+) repeatable on FIELD_DEFINITION | OBJECT
"""
The @grpc operator indicates that a field or node is backed by a gRPC API.For instance,
@@ -149,7 +149,7 @@ directive @grpc(
This refers to URL of the API.
"""
url: String!
-) on FIELD_DEFINITION | OBJECT
+) repeatable on FIELD_DEFINITION | OBJECT
"""
The @http operator indicates that a field or node is backed by a REST API.For instance,
@@ -229,11 +229,11 @@ directive @http(
This refers to URL of the API.
"""
url: String!
-) on FIELD_DEFINITION | OBJECT
+) repeatable on FIELD_DEFINITION | OBJECT
directive @js(
name: String!
-) on FIELD_DEFINITION | OBJECT
+) repeatable on FIELD_DEFINITION | OBJECT
"""
The @link directive allows you to import external resources, such as configuration
@@ -1026,14 +1026,41 @@ enum Method {
}
enum LinkType {
+ """
+ Points to another Tailcall Configuration file. The imported configuration will be merged into the importing configuration.
+ """
Config
+ """
+ Points to a Protobuf file. The imported Protobuf file will be used by the `@grpc` directive. If your API exposes a reflection endpoint, you should set the type to `Grpc` instead.
+ """
Protobuf
+ """
+ Points to a JS file. The imported JS file will be used by the `@js` directive.
+ """
Script
+ """
+ Points to a Cert file. The imported Cert file will be used by the server to serve over HTTPS.
+ """
Cert
+ """
+ Points to a Key file. The imported Key file will be used by the server to serve over HTTPS.
+ """
Key
+ """
+ A trusted document that contains GraphQL operations (queries, mutations) that can be exposed as a REST API using the `@rest` directive.
+ """
Operation
+ """
+ Points to a Htpasswd file. The imported Htpasswd file will be used by the server to authenticate users.
+ """
Htpasswd
+ """
+ Points to a Jwks file. The imported Jwks file will be used by the server to authenticate users.
+ """
Jwks
+ """
+ Points to a reflection endpoint. The imported reflection endpoint will be used by the `@grpc` directive to resolve data from gRPC services.
+ """
Grpc
}
diff --git a/generated/.tailcallrc.schema.json b/generated/.tailcallrc.schema.json
index 484b1356fb..f02274dcb4 100644
--- a/generated/.tailcallrc.schema.json
+++ b/generated/.tailcallrc.schema.json
@@ -400,81 +400,13 @@
},
"Field": {
"description": "A field definition containing all the metadata information about resolving a field.",
- "type": "object",
- "oneOf": [
- {
- "type": "object",
- "required": [
- "http"
- ],
- "properties": {
- "http": {
- "$ref": "#/definitions/Http"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "grpc"
- ],
- "properties": {
- "grpc": {
- "$ref": "#/definitions/Grpc"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "graphql"
- ],
- "properties": {
- "graphql": {
- "$ref": "#/definitions/GraphQL"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "call"
- ],
- "properties": {
- "call": {
- "$ref": "#/definitions/Call"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "js"
- ],
- "properties": {
- "js": {
- "$ref": "#/definitions/JS"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "expr"
- ],
- "properties": {
- "expr": {
- "$ref": "#/definitions/Expr"
- }
- },
- "additionalProperties": false
- }
+ "type": [
+ "object",
+ "array"
],
+ "items": {
+ "$ref": "#/definitions/Resolver"
+ },
"properties": {
"args": {
"description": "Map of argument name and its definition.",
@@ -901,17 +833,70 @@
"additionalProperties": false
},
"LinkType": {
- "type": "string",
- "enum": [
- "Config",
- "Protobuf",
- "Script",
- "Cert",
- "Key",
- "Operation",
- "Htpasswd",
- "Jwks",
- "Grpc"
+ "oneOf": [
+ {
+ "description": "Points to another Tailcall Configuration file. The imported configuration will be merged into the importing configuration.",
+ "type": "string",
+ "enum": [
+ "Config"
+ ]
+ },
+ {
+ "description": "Points to a Protobuf file. The imported Protobuf file will be used by the `@grpc` directive. If your API exposes a reflection endpoint, you should set the type to `Grpc` instead.",
+ "type": "string",
+ "enum": [
+ "Protobuf"
+ ]
+ },
+ {
+ "description": "Points to a JS file. The imported JS file will be used by the `@js` directive.",
+ "type": "string",
+ "enum": [
+ "Script"
+ ]
+ },
+ {
+ "description": "Points to a Cert file. The imported Cert file will be used by the server to serve over HTTPS.",
+ "type": "string",
+ "enum": [
+ "Cert"
+ ]
+ },
+ {
+ "description": "Points to a Key file. The imported Key file will be used by the server to serve over HTTPS.",
+ "type": "string",
+ "enum": [
+ "Key"
+ ]
+ },
+ {
+ "description": "A trusted document that contains GraphQL operations (queries, mutations) that can be exposed as a REST API using the `@rest` directive.",
+ "type": "string",
+ "enum": [
+ "Operation"
+ ]
+ },
+ {
+ "description": "Points to a Htpasswd file. The imported Htpasswd file will be used by the server to authenticate users.",
+ "type": "string",
+ "enum": [
+ "Htpasswd"
+ ]
+ },
+ {
+ "description": "Points to a Jwks file. The imported Jwks file will be used by the server to authenticate users.",
+ "type": "string",
+ "enum": [
+ "Jwks"
+ ]
+ },
+ {
+ "description": "Points to a reflection endpoint. The imported reflection endpoint will be used by the `@grpc` directive to resolve data from gRPC services.",
+ "type": "string",
+ "enum": [
+ "Grpc"
+ ]
+ }
]
},
"Method": {
@@ -1021,6 +1006,82 @@
}
}
},
+ "Resolver": {
+ "oneOf": [
+ {
+ "type": "object",
+ "required": [
+ "http"
+ ],
+ "properties": {
+ "http": {
+ "$ref": "#/definitions/Http"
+ }
+ },
+ "additionalProperties": false
+ },
+ {
+ "type": "object",
+ "required": [
+ "grpc"
+ ],
+ "properties": {
+ "grpc": {
+ "$ref": "#/definitions/Grpc"
+ }
+ },
+ "additionalProperties": false
+ },
+ {
+ "type": "object",
+ "required": [
+ "graphql"
+ ],
+ "properties": {
+ "graphql": {
+ "$ref": "#/definitions/GraphQL"
+ }
+ },
+ "additionalProperties": false
+ },
+ {
+ "type": "object",
+ "required": [
+ "call"
+ ],
+ "properties": {
+ "call": {
+ "$ref": "#/definitions/Call"
+ }
+ },
+ "additionalProperties": false
+ },
+ {
+ "type": "object",
+ "required": [
+ "js"
+ ],
+ "properties": {
+ "js": {
+ "$ref": "#/definitions/JS"
+ }
+ },
+ "additionalProperties": false
+ },
+ {
+ "type": "object",
+ "required": [
+ "expr"
+ ],
+ "properties": {
+ "expr": {
+ "$ref": "#/definitions/Expr"
+ }
+ },
+ "additionalProperties": false
+ }
+ ]
+ },
"RootSchema": {
"type": "object",
"properties": {
@@ -1336,81 +1397,13 @@
},
"Type": {
"description": "Represents a GraphQL type. A type can be an object, interface, enum or scalar.",
- "type": "object",
- "oneOf": [
- {
- "type": "object",
- "required": [
- "http"
- ],
- "properties": {
- "http": {
- "$ref": "#/definitions/Http"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "grpc"
- ],
- "properties": {
- "grpc": {
- "$ref": "#/definitions/Grpc"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "graphql"
- ],
- "properties": {
- "graphql": {
- "$ref": "#/definitions/GraphQL"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "call"
- ],
- "properties": {
- "call": {
- "$ref": "#/definitions/Call"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "js"
- ],
- "properties": {
- "js": {
- "$ref": "#/definitions/JS"
- }
- },
- "additionalProperties": false
- },
- {
- "type": "object",
- "required": [
- "expr"
- ],
- "properties": {
- "expr": {
- "$ref": "#/definitions/Expr"
- }
- },
- "additionalProperties": false
- }
+ "type": [
+ "object",
+ "array"
],
+ "items": {
+ "$ref": "#/definitions/Resolver"
+ },
"required": [
"fields"
],
diff --git a/src/cli/tc/init.rs b/src/cli/tc/init.rs
index f3064e6194..9797ebf8c2 100644
--- a/src/cli/tc/init.rs
+++ b/src/cli/tc/init.rs
@@ -90,7 +90,7 @@ async fn confirm_and_write_yml(
fn main_config() -> Config {
let field = Field {
type_of: Type::from("String".to_owned()).into_required(),
- resolver: Some(Resolver::Expr(Expr { body: "Hello, World!".into() })),
+ resolvers: Resolver::Expr(Expr { body: "Hello, World!".into() }).into(),
..Default::default()
};
diff --git a/src/cli/tc/run.rs b/src/cli/tc/run.rs
index 938a43ec5d..61f93f4394 100644
--- a/src/cli/tc/run.rs
+++ b/src/cli/tc/run.rs
@@ -16,7 +16,7 @@ pub async fn run() -> Result<()> {
tracing::info!("Env file: {:?} loaded", path);
}
let cli = Cli::parse();
- update_checker::check_for_update().await;
+ tokio::task::spawn(update_checker::check_for_update());
// Initialize ping event every 60 seconds
let _ = TRACKER
.init_ping(tokio::time::Duration::from_secs(60))
diff --git a/src/cli/update_checker.rs b/src/cli/update_checker.rs
index 89852ccf4a..f471ba4412 100644
--- a/src/cli/update_checker.rs
+++ b/src/cli/update_checker.rs
@@ -75,22 +75,20 @@ fn show_update_message(name: &str, latest_version: Version) {
}
pub async fn check_for_update() {
- tokio::task::spawn_blocking(|| {
- if VERSION.is_dev() {
- // skip validation if it's not a release
- return;
- }
+ if VERSION.is_dev() {
+ // skip validation if it's not a release
+ return;
+ }
- let name: &str = "tailcallhq/tailcall";
+ let name: &str = "tailcallhq/tailcall";
- let informer = update_informer::new(registry::GitHub, name, VERSION.as_str());
+ let informer = update_informer::new(registry::GitHub, name, VERSION.as_str());
- if let Some(latest_version) = informer.check_version().ok().flatten() {
- // schedules the update message to be shown when the user presses Ctrl+C on cli.
- let _ = set_handler(move || {
- show_update_message(name, latest_version.clone());
- std::process::exit(exitcode::OK);
- });
- }
- });
+ if let Some(latest_version) = informer.check_version().ok().flatten() {
+ // schedules the update message to be shown when the user presses Ctrl+C on cli.
+ let _ = set_handler(move || {
+ show_update_message(name, latest_version.clone());
+ std::process::exit(exitcode::OK);
+ });
+ }
}
diff --git a/src/core/blueprint/cors.rs b/src/core/blueprint/cors.rs
index b871d1905e..13edf8b0a1 100644
--- a/src/core/blueprint/cors.rs
+++ b/src/core/blueprint/cors.rs
@@ -1,11 +1,9 @@
-use std::fmt::Display;
-
use derive_setters::Setters;
-use http::header;
-use http::header::{HeaderName, HeaderValue};
+use http::header::{self, HeaderName, HeaderValue, InvalidHeaderValue};
use http::request::Parts;
use tailcall_valid::ValidationError;
+use super::BlueprintError;
use crate::core::config;
#[derive(Clone, Debug, Setters, Default)]
@@ -118,7 +116,9 @@ impl Cors {
}
}
-fn ensure_usable_cors_rules(layer: &Cors) -> Result<(), ValidationError> {
+fn ensure_usable_cors_rules(
+ layer: &Cors,
+) -> Result<(), ValidationError> {
if layer.allow_credentials {
let allowing_all_headers = layer
.allow_headers
@@ -127,8 +127,11 @@ fn ensure_usable_cors_rules(layer: &Cors) -> Result<(), ValidationError>
.is_some();
if allowing_all_headers {
- Err(ValidationError::new("Invalid CORS configuration: Cannot combine `Access-Control-Allow-Credentials: true` \
- with `Access-Control-Allow-Headers: *`".into()))?
+ return Err(ValidationError::new(
+ BlueprintError::InvalidCORSConfiguration(
+ "Access-Control-Allow-Headers".to_string(),
+ ),
+ ));
}
let allowing_all_methods = layer
@@ -138,33 +141,38 @@ fn ensure_usable_cors_rules(layer: &Cors) -> Result<(), ValidationError>
.is_some();
if allowing_all_methods {
- Err(ValidationError::new("Invalid CORS configuration: Cannot combine `Access-Control-Allow-Credentials: true` \
- with `Access-Control-Allow-Methods: *`".into()))?
+ return Err(ValidationError::new(
+ BlueprintError::InvalidCORSConfiguration(
+ "Access-Control-Allow-Methods".to_string(),
+ ),
+ ));
}
let allowing_all_origins = layer.allow_origins.iter().any(is_wildcard);
if allowing_all_origins {
- Err(ValidationError::new("Invalid CORS configuration: Cannot combine `Access-Control-Allow-Credentials: true` \
- with `Access-Control-Allow-Origin: *`".into()))?
+ return Err(ValidationError::new(
+ BlueprintError::InvalidCORSConfiguration("Access-Control-Allow-Origin".to_string()),
+ ));
}
if layer.expose_headers_is_wildcard() {
- Err(ValidationError::new("Invalid CORS configuration: Cannot combine `Access-Control-Allow-Credentials: true` \
- with `Access-Control-Expose-Headers: *`".into()))?
+ return Err(ValidationError::new(
+ BlueprintError::InvalidCORSConfiguration(
+ "Access-Control-Expose-Headers".to_string(),
+ ),
+ ));
}
}
Ok(())
}
-fn to_validation_err(err: T) -> ValidationError {
- ValidationError::new(err.to_string())
-}
-
impl TryFrom for Cors {
- type Error = ValidationError;
+ type Error = ValidationError;
- fn try_from(value: config::cors::Cors) -> Result> {
+ fn try_from(
+ value: config::cors::Cors,
+ ) -> Result> {
let cors = Cors {
allow_credentials: value.allow_credentials.unwrap_or_default(),
allow_headers: (!value.allow_headers.is_empty()).then_some(
@@ -172,11 +180,12 @@ impl TryFrom for Cors {
.allow_headers
.join(", ")
.parse()
- .map_err(to_validation_err)?,
+ .map_err(|e: InvalidHeaderValue| ValidationError::new(e.into()))?,
),
allow_methods: {
Some(if value.allow_methods.is_empty() {
- "*".parse().map_err(to_validation_err)?
+ "*".parse()
+ .map_err(|e: InvalidHeaderValue| ValidationError::new(e.into()))?
} else {
value
.allow_methods
@@ -185,28 +194,34 @@ impl TryFrom for Cors {
.collect::>()
.join(", ")
.parse()
- .map_err(to_validation_err)?
+ .map_err(|e: InvalidHeaderValue| ValidationError::new(e.into()))?
})
},
allow_origins: value
.allow_origins
.into_iter()
- .map(|val| val.parse().map_err(to_validation_err))
- .collect::>>()?,
+ .map(|val| {
+ val.parse()
+ .map_err(|e: InvalidHeaderValue| ValidationError::new(e.into()))
+ })
+ .collect::>>()?,
allow_private_network: value.allow_private_network.unwrap_or_default(),
expose_headers: Some(
value
.expose_headers
.join(", ")
.parse()
- .map_err(to_validation_err)?,
+ .map_err(|e: InvalidHeaderValue| ValidationError::new(e.into()))?,
),
max_age: value.max_age.map(|val| val.into()),
vary: value
.vary
.iter()
- .map(|val| val.parse().map_err(to_validation_err))
- .collect::>>()?,
+ .map(|val| {
+ val.parse()
+ .map_err(|e: InvalidHeaderValue| ValidationError::new(e.into()))
+ })
+ .collect::>>()?,
};
ensure_usable_cors_rules(&cors)?;
Ok(cors)
diff --git a/src/core/blueprint/definitions.rs b/src/core/blueprint/definitions.rs
index 15dc40bb41..a38b498d7e 100644
--- a/src/core/blueprint/definitions.rs
+++ b/src/core/blueprint/definitions.rs
@@ -14,9 +14,9 @@ use crate::core::ir::model::{Cache, IR};
use crate::core::try_fold::TryFold;
use crate::core::{config, scalar, Type};
-pub fn to_scalar_type_definition(name: &str) -> Valid {
+pub fn to_scalar_type_definition(name: &str) -> Valid {
if scalar::Scalar::is_predefined(name) {
- Valid::fail(format!("Scalar type {} is predefined", name))
+ Valid::fail(BlueprintError::ScalarTypeIsPredefined(name.to_string()))
} else {
Valid::succeed(Definition::Scalar(ScalarTypeDefinition {
name: name.to_string(),
@@ -40,7 +40,7 @@ pub fn to_union_type_definition((name, u): (&String, &Union)) -> Definition {
pub fn to_input_object_type_definition(
definition: ObjectTypeDefinition,
-) -> Valid {
+) -> Valid {
Valid::succeed(Definition::InputObject(InputObjectTypeDefinition {
name: definition.name,
fields: definition
@@ -58,7 +58,9 @@ pub fn to_input_object_type_definition(
}))
}
-pub fn to_interface_type_definition(definition: ObjectTypeDefinition) -> Valid {
+pub fn to_interface_type_definition(
+ definition: ObjectTypeDefinition,
+) -> Valid {
Valid::succeed(Definition::Interface(InterfaceTypeDefinition {
name: definition.name,
fields: definition.fields,
@@ -68,8 +70,8 @@ pub fn to_interface_type_definition(definition: ObjectTypeDefinition) -> Valid Valid;
-type PathResolverErrorHandler = dyn Fn(&str, &str, &str, &[String]) -> Valid;
+type InvalidPathHandler = dyn Fn(&str, &[String], &[String]) -> Valid;
+type PathResolverErrorHandler = dyn Fn(&str, &str, &str, &[String]) -> Valid;
struct ProcessFieldWithinTypeContext<'a> {
field: &'a config::Field,
@@ -96,7 +98,9 @@ struct ProcessPathContext<'a> {
original_path: &'a [String],
}
-fn process_field_within_type(context: ProcessFieldWithinTypeContext) -> Valid {
+fn process_field_within_type(
+ context: ProcessFieldWithinTypeContext,
+) -> Valid {
let field = context.field;
let field_name = context.field_name;
let remaining_path = context.remaining_path;
@@ -107,14 +111,19 @@ fn process_field_within_type(context: ProcessFieldWithinTypeContext) -> Valid Valid Valid {
+fn process_path(context: ProcessPathContext) -> Valid {
let path = context.path;
let field = context.field;
let type_info = context.type_info;
@@ -254,7 +263,7 @@ fn to_object_type_definition(
name: &str,
type_of: &config::Type,
config_module: &ConfigModule,
-) -> Valid {
+) -> Valid {
to_fields(name, type_of, config_module).map(|fields| {
Definition::Object(ObjectTypeDefinition {
name: name.to_string(),
@@ -266,10 +275,13 @@ fn to_object_type_definition(
})
}
-fn update_args<'a>(
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &str), FieldDefinition, String>::new(
+fn update_args<'a>() -> TryFold<
+ 'a,
+ (&'a ConfigModule, &'a Field, &'a config::Type, &'a str),
+ FieldDefinition,
+ BlueprintError,
+> {
+ TryFold::<(&ConfigModule, &Field, &config::Type, &str), FieldDefinition, BlueprintError>::new(
move |(_, field, _typ, name), _| {
// TODO: assert type name
Valid::from_iter(field.args.iter(), |(name, arg)| {
@@ -303,7 +315,7 @@ fn item_is_numeric(list: &[String]) -> bool {
fn update_resolver_from_path(
context: &ProcessPathContext,
base_field: blueprint::FieldDefinition,
-) -> Valid {
+) -> Valid {
let has_index = item_is_numeric(context.path);
process_path(context.clone()).and_then(|of_type| {
@@ -328,10 +340,13 @@ fn update_resolver_from_path(
/// resolvers that cannot be resolved from the root of the schema. This function
/// finds such dangling resolvers and creates a resolvable path from the root
/// schema.
-pub fn fix_dangling_resolvers<'a>(
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &str), FieldDefinition, String>::new(
+pub fn fix_dangling_resolvers<'a>() -> TryFold<
+ 'a,
+ (&'a ConfigModule, &'a Field, &'a config::Type, &'a str),
+ FieldDefinition,
+ BlueprintError,
+> {
+ TryFold::<(&ConfigModule, &Field, &config::Type, &str), FieldDefinition, BlueprintError>::new(
move |(config, field, _, name), mut b_field| {
let mut set = HashSet::new();
if !field.has_resolver()
@@ -349,10 +364,13 @@ pub fn fix_dangling_resolvers<'a>(
/// Wraps the IO Expression with Expression::Cached
/// if `Field::cache` is present for that field
-pub fn update_cache_resolvers<'a>(
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &str), FieldDefinition, String>::new(
+pub fn update_cache_resolvers<'a>() -> TryFold<
+ 'a,
+ (&'a ConfigModule, &'a Field, &'a config::Type, &'a str),
+ FieldDefinition,
+ BlueprintError,
+> {
+ TryFold::<(&ConfigModule, &Field, &config::Type, &str), FieldDefinition, BlueprintError>::new(
move |(_config, field, typ, _name), mut b_field| {
if let Some(config::Cache { max_age }) = field.cache.as_ref().or(typ.cache.as_ref()) {
b_field.map_expr(|expression| Cache::wrap(*max_age, expression))
@@ -363,10 +381,10 @@ pub fn update_cache_resolvers<'a>(
)
}
-fn validate_field_type_exist(config: &Config, field: &Field) -> Valid<(), String> {
+fn validate_field_type_exist(config: &Config, field: &Field) -> Valid<(), BlueprintError> {
let field_type = field.type_of.name();
if !scalar::Scalar::is_predefined(field_type) && !config.contains(field_type) {
- Valid::fail(format!("Undeclared type '{field_type}' was found"))
+ Valid::fail(BlueprintError::UndeclaredTypeFound(field_type.clone()))
} else {
Valid::succeed(())
}
@@ -376,7 +394,7 @@ fn to_fields(
object_name: &str,
type_of: &config::Type,
config_module: &ConfigModule,
-) -> Valid, String> {
+) -> Valid, BlueprintError> {
let operation_type = if config_module
.schema
.mutation
@@ -388,28 +406,55 @@ fn to_fields(
GraphQLOperationType::Query
};
// Process fields that are not marked as `omit`
+
+ // collect the parent auth ids
+ let parent_auth_ids = type_of.protected.as_ref().and_then(|p| p.id.as_ref());
+ // collect the field names that have different auth ids than the parent type
+ let fields_with_different_auth_ids = type_of
+ .fields
+ .iter()
+ .filter_map(|(k, v)| {
+ if let Some(p) = &v.protected {
+ if p.id.as_ref() != parent_auth_ids {
+ Some(k)
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ })
+ .collect::>();
+
let fields = Valid::from_iter(
type_of
.fields
.iter()
.filter(|(_, field)| !field.is_omitted()),
|(name, field)| {
- validate_field_type_exist(config_module, field)
- .and(to_field_definition(
+ let mut result =
+ validate_field_type_exist(config_module, field).and(to_field_definition(
field,
&operation_type,
object_name,
config_module,
type_of,
name,
- ))
- .trace(name)
+ ));
+
+ if fields_with_different_auth_ids.contains(name) || parent_auth_ids.is_none() {
+ // if the field has a different auth id than the parent type or parent has no
+ // auth id, we need to add correct trace.
+ result = result.trace(name);
+ }
+
+ result
},
);
let to_added_field = |add_field: &config::AddField,
type_of: &config::Type|
- -> Valid {
+ -> Valid {
let source_field = type_of
.fields
.iter()
@@ -424,20 +469,21 @@ fn to_fields(
&add_field.name,
)
.and_then(|field_definition| {
- let added_field_path = match source_field.resolver {
- Some(_) => add_field.path[1..]
+ let added_field_path = if source_field.resolvers.is_empty() {
+ add_field.path.clone()
+ } else {
+ add_field.path[1..]
.iter()
.map(|s| s.to_owned())
- .collect::>(),
- None => add_field.path.clone(),
+ .collect::>()
};
let invalid_path_handler = |field_name: &str,
_added_field_path: &[String],
original_path: &[String]|
- -> Valid {
+ -> Valid {
Valid::fail_with(
- "Cannot add field".to_string(),
- format!("Path [{}] does not exist", original_path.join(", ")),
+ BlueprintError::CannotAddField,
+ BlueprintError::PathDoesNotExist(original_path.join(", ")),
)
.trace(field_name)
};
@@ -445,15 +491,14 @@ fn to_fields(
field_type: &str,
field_name: &str,
original_path: &[String]|
- -> Valid {
- Valid::::fail_with(
- "Cannot add field".to_string(),
- format!(
- "Path: [{}] contains resolver {} at [{}.{}]",
+ -> Valid {
+ Valid::::fail_with(
+ BlueprintError::CannotAddField,
+ BlueprintError::PathContainsResolver(
original_path.join(", "),
- resolver_name,
- field_type,
- field_name
+ resolver_name.to_string(),
+ field_type.to_string(),
+ field_name.to_string(),
),
)
};
@@ -472,10 +517,9 @@ fn to_fields(
)
})
.trace(config::AddField::trace_name().as_str()),
- None => Valid::fail(format!(
- "Could not find field {} in path {}",
- add_field.path[0],
- add_field.path.join(",")
+ None => Valid::fail(BlueprintError::FieldNotFoundInPath(
+ add_field.path[0].clone(),
+ add_field.path.join(","),
)),
}
};
@@ -497,15 +541,10 @@ pub fn to_field_definition(
config_module: &ConfigModule,
type_of: &config::Type,
name: &str,
-) -> Valid {
+) -> Valid {
update_args()
- .and(update_http().trace(config::Http::trace_name().as_str()))
- .and(update_grpc(operation_type).trace(config::Grpc::trace_name().as_str()))
- .and(update_const_field().trace(config::Expr::trace_name().as_str()))
- .and(update_js_field().trace(config::JS::trace_name().as_str()))
- .and(update_graphql(operation_type).trace(config::GraphQL::trace_name().as_str()))
+ .and(update_resolver(operation_type, object_name))
.and(update_modify().trace(config::Modify::trace_name().as_str()))
- .and(update_call(operation_type, object_name).trace(config::Call::trace_name().as_str()))
.and(fix_dangling_resolvers())
.and(update_cache_resolvers())
.and(update_protected(object_name).trace(Protected::trace_name().as_str()))
@@ -518,8 +557,8 @@ pub fn to_field_definition(
)
}
-pub fn to_definitions<'a>() -> TryFold<'a, ConfigModule, Vec, String> {
- TryFold::, String>::new(|config_module, _| {
+pub fn to_definitions<'a>() -> TryFold<'a, ConfigModule, Vec, BlueprintError> {
+ TryFold::, BlueprintError>::new(|config_module, _| {
Valid::from_iter(config_module.types.iter(), |(name, type_)| {
if type_.scalar() {
to_scalar_type_definition(name).trace(name)
@@ -548,7 +587,7 @@ pub fn to_definitions<'a>() -> TryFold<'a, ConfigModule, Vec, String
config_module.enums.iter(),
|(name, type_)| {
if type_.variants.is_empty() {
- Valid::fail("No variants found for enum".to_string())
+ Valid::fail(BlueprintError::NoVariantsFoundForEnum)
} else {
Valid::succeed(to_enum_type_definition((name, type_)))
}
diff --git a/src/core/blueprint/directive.rs b/src/core/blueprint/directive.rs
index 3ef9045a3b..99333908ab 100644
--- a/src/core/blueprint/directive.rs
+++ b/src/core/blueprint/directive.rs
@@ -5,6 +5,7 @@ use async_graphql::Name;
use serde_json::Value;
use tailcall_valid::{Valid, ValidationError, Validator};
+use super::BlueprintError;
use crate::core::{config, pos};
#[derive(Clone, Debug)]
@@ -13,8 +14,8 @@ pub struct Directive {
pub arguments: HashMap<String, Value>,
}
-pub fn to_directive(const_directive: ConstDirective) -> Valid<Directive, String> {
- const_directive
+pub fn to_directive(const_directive: ConstDirective) -> Valid<Directive, BlueprintError> {
+ match const_directive
.arguments
.into_iter()
.map(|(k, v)| {
@@ -25,7 +26,10 @@ pub fn to_directive(const_directive: ConstDirective) -> Valid
.collect::>()
.map_err(|e| ValidationError::new(e.to_string()))
.map(|arguments| Directive { name: const_directive.name.node.to_string(), arguments })
- .into()
+ {
+ Ok(data) => Valid::succeed(data),
+ Err(e) => Valid::from_validation_err(BlueprintError::from_validation_string(e)),
+ }
}
pub fn to_const_directive(directive: &Directive) -> Valid {
diff --git a/src/core/blueprint/error.rs b/src/core/blueprint/error.rs
new file mode 100644
index 0000000000..a8980ac8e7
--- /dev/null
+++ b/src/core/blueprint/error.rs
@@ -0,0 +1,271 @@
+use std::net::AddrParseError;
+
+use async_graphql::dynamic::SchemaError;
+use tailcall_valid::{Cause, ValidationError};
+
+use crate::core::Errata;
+
+#[derive(Debug, thiserror::Error)]
+pub enum BlueprintError {
+ #[error("Apollo federation resolvers can't be a part of entity resolver")]
+ ApolloFederationResolversNoPartOfEntityResolver,
+
+ #[error("Query type is not an object inside the blueprint")]
+ QueryTypeNotObject,
+
+ #[error("Cannot find type {0} in the config")]
+ TypeNotFoundInConfig(String),
+
+ #[error("Cannot find field {0} in the type")]
+ FieldNotFoundInType(String),
+
+ #[error("no argument '{0}' found")]
+ ArgumentNotFound(String),
+
+ #[error("field {0} has no resolver")]
+ FieldHasNoResolver(String),
+
+ #[error("Steps can't be empty")]
+ StepsCanNotBeEmpty,
+
+ #[error("Result resolver can't be empty")]
+ ResultResolverCanNotBeEmpty,
+
+ #[error("call must have query or mutation")]
+ CallMustHaveQueryOrMutation,
+
+ #[error("invalid JSON: {0}")]
+ InvalidJson(anyhow::Error),
+
+ #[error("field {0} not found")]
+ FieldNotFound(String),
+
+ #[error("Invalid method format: {0}. Expected format is ..")]
+ InvalidGrpcMethodFormat(String),
+
+ #[error("Protobuf files were not specified in the config")]
+ ProtobufFilesNotSpecifiedInConfig,
+
+ #[error("GroupBy is only supported for GET requests")]
+ GroupByOnlyForGet,
+
+ #[error("Batching capability was used without enabling it in upstream")]
+ IncorrectBatchingUsage,
+
+ #[error("script is required")]
+ ScriptIsRequired,
+
+ #[error("Field is already implemented from interface")]
+ FieldExistsInInterface,
+
+ #[error("Input types can not be protected")]
+ InputTypesCannotBeProtected,
+
+ #[error("@protected operator is used but there is no @link definitions for auth providers")]
+ ProtectedOperatorNoAuthProviders,
+
+ #[error("Auth provider {0} not found")]
+ AuthProviderNotFound(String),
+
+ #[error("syntax error when parsing `{0}`")]
+ SyntaxErrorWhenParsing(String),
+
+ #[error("Scalar type {0} is predefined")]
+ ScalarTypeIsPredefined(String),
+
+ #[error("Undeclared type '{0}' was found")]
+ UndeclaredTypeFound(String),
+
+ #[error("Cannot add field")]
+ CannotAddField,
+
+ #[error("Path [{0}] does not exist")]
+ PathDoesNotExist(String),
+
+ #[error("Path: [{0}] contains resolver {1} at [{2}.{3}]")]
+ PathContainsResolver(String, String, String, String),
+
+ #[error("Could not find field {0} in path {1}")]
+ FieldNotFoundInPath(String, String),
+
+ #[error("No variants found for enum")]
+ NoVariantsFoundForEnum,
+
+ #[error("Link src cannot be empty")]
+ LinkSrcCannotBeEmpty,
+
+ #[error("Duplicated id: {0}")]
+ Duplicated(String),
+
+ #[error("Only one script link is allowed")]
+ OnlyOneScriptLinkAllowed,
+
+ #[error("Only one key link is allowed")]
+ OnlyOneKeyLinkAllowed,
+
+ #[error("no value '{0}' found")]
+ NoValueFound(String),
+
+ #[error("value '{0}' is a nullable type")]
+ ValueIsNullableType(String),
+
+ #[error("value '{0}' is not of a scalar type")]
+ ValueIsNotOfScalarType(String),
+
+ #[error("no type '{0}' found")]
+ NoTypeFound(String),
+
+ #[error("too few parts in template")]
+ TooFewPartsInTemplate,
+
+ #[error("can't use list type '{0}' here")]
+ CantUseListTypeHere(String),
+
+ #[error("argument '{0}' is a nullable type")]
+ ArgumentIsNullableType(String),
+
+ #[error("var '{0}' is not set in the server config")]
+ VarNotSetInServerConfig(String),
+
+ #[error("unknown template directive '{0}'")]
+ UnknownTemplateDirective(String),
+
+ #[error("Query root is missing")]
+ QueryRootIsMissing,
+
+ #[error("Query type is not defined")]
+ QueryTypeNotDefined,
+
+ #[error("No resolver has been found in the schema")]
+ NoResolverFoundInSchema,
+
+ #[error("Mutation type is not defined")]
+ MutationTypeNotDefined,
+
+ #[error("Certificate is required for HTTP2")]
+ CertificateIsRequiredForHTTP2,
+
+ #[error("Key is required for HTTP2")]
+ KeyIsRequiredForHTTP2,
+
+ #[error("Experimental headers must start with 'x-' or 'X-'. Got: '{0}'")]
+ ExperimentalHeaderInvalidFormat(String),
+
+ #[error("`graph_ref` should be in the format @ where `graph_id` and `variant` can only contain letters, numbers, '-' and '_'. Found {0}")]
+ InvalidGraphRef(String),
+
+ #[error("Invalid CORS configuration: Cannot combine `Access-Control-Allow-Credentials: true` with `{0}: *`")]
+ InvalidCORSConfiguration(String),
+
+ #[error("{0}")]
+ Cause(String),
+
+ #[error("{0}")]
+ Description(String),
+
+ #[error("Parsing failed because of {0}")]
+ ParsingFailed(#[from] AddrParseError),
+
+ #[error(transparent)]
+ Schema(#[from] SchemaError),
+
+ #[error(transparent)]
+ UrlParse(#[from] url::ParseError),
+
+ #[error("Parsing failed because of {0}")]
+ InvalidHeaderName(#[from] http::header::InvalidHeaderName),
+
+ #[error("Parsing failed because of {0}")]
+ InvalidHeaderValue(#[from] http::header::InvalidHeaderValue),
+
+ #[error(transparent)]
+ Error(#[from] anyhow::Error),
+}
+
+impl PartialEq for BlueprintError {
+ fn eq(&self, other: &Self) -> bool {
+ self.to_string() == other.to_string()
+ }
+}
+
+impl From<ValidationError<BlueprintError>> for Errata {
+ fn from(error: ValidationError<BlueprintError>) -> Self {
+ Errata::new("Blueprint Error").caused_by(
+ error
+ .as_vec()
+ .iter()
+ .map(|cause| {
+ let mut err =
+ Errata::new(&cause.message.to_string()).trace(cause.trace.clone().into());
+ if let Some(description) = &cause.description {
+ err = err.description(description.to_string());
+ }
+ err
+ })
+ .collect(),
+ )
+ }
+}
+
+impl BlueprintError {
+ pub fn to_validation_string(
+ errors: ValidationError<BlueprintError>,
+ ) -> ValidationError<String> {
+ let causes: Vec<Cause<String>> = errors
+ .as_vec()
+ .iter()
+ .map(|cause| {
+ let new_cause =
+ Cause::new(cause.message.to_string()).trace(cause.trace.clone().into());
+
+ if let Some(description) = &cause.description {
+ new_cause.description(description.to_string())
+ } else {
+ new_cause
+ }
+ })
+ .collect();
+
+ ValidationError::from(causes)
+ }
+
+ pub fn from_validation_str(errors: ValidationError<&str>) -> ValidationError<BlueprintError> {
+ let causes: Vec<Cause<BlueprintError>> = errors
+ .as_vec()
+ .iter()
+ .map(|cause| {
+ let new_cause = Cause::new(BlueprintError::Cause(cause.message.to_string()))
+ .trace(cause.trace.clone().into());
+
+ if let Some(description) = cause.description {
+ new_cause.description(BlueprintError::Description(description.to_string()))
+ } else {
+ new_cause
+ }
+ })
+ .collect();
+
+ ValidationError::from(causes)
+ }
+
+ pub fn from_validation_string(
+ errors: ValidationError<String>,
+ ) -> ValidationError<BlueprintError> {
+ let causes: Vec<Cause<BlueprintError>> = errors
+ .as_vec()
+ .iter()
+ .map(|cause| {
+ let new_cause = Cause::new(BlueprintError::Cause(cause.message.to_string()))
+ .trace(cause.trace.clone().into());
+
+ if let Some(description) = &cause.description {
+ new_cause.description(BlueprintError::Description(description.to_string()))
+ } else {
+ new_cause
+ }
+ })
+ .collect();
+
+ ValidationError::from(causes)
+ }
+}
diff --git a/src/core/blueprint/fixture/recursive-arg.graphql b/src/core/blueprint/fixture/recursive-arg.graphql
new file mode 100644
index 0000000000..a124e657b5
--- /dev/null
+++ b/src/core/blueprint/fixture/recursive-arg.graphql
@@ -0,0 +1,10 @@
+schema @server(port: 8000) {
+ query: Query
+}
+type Query {
+ posts(id: PostData): Int @http(url: "upstream.com", query: [{key: "id", value: "{{.args.id.data}}"}])
+}
+type PostData {
+ author: String
+ data: PostData
+}
diff --git a/src/core/blueprint/from_config.rs b/src/core/blueprint/from_config.rs
index e9a8d094b3..fb95066b8e 100644
--- a/src/core/blueprint/from_config.rs
+++ b/src/core/blueprint/from_config.rs
@@ -15,7 +15,7 @@ use crate::core::json::JsonSchema;
use crate::core::try_fold::TryFold;
use crate::core::Type;
-pub fn config_blueprint<'a>() -> TryFold<'a, ConfigModule, Blueprint, String> {
+pub fn config_blueprint<'a>() -> TryFold<'a, ConfigModule, Blueprint, BlueprintError> {
let server = TryFoldConfig::::new(|config_module, blueprint| {
Valid::from(Server::try_from(config_module.clone())).map(|server| blueprint.server(server))
});
@@ -88,7 +88,7 @@ pub fn to_json_schema(type_of: &Type, config: &Config) -> JsonSchema {
if let Some(type_) = type_ {
let mut schema_fields = BTreeMap::new();
for (name, field) in type_.fields.iter() {
- if field.resolver.is_none() {
+ if field.resolvers.is_empty() {
schema_fields.insert(name.clone(), to_json_schema(&field.type_of, config));
}
}
@@ -116,20 +116,24 @@ pub fn to_json_schema(type_of: &Type, config: &Config) -> JsonSchema {
}
impl TryFrom<&ConfigModule> for Blueprint {
- type Error = ValidationError<String>;
+ type Error = ValidationError<BlueprintError>;
fn try_from(config_module: &ConfigModule) -> Result<Self, Self::Error> {
config_blueprint()
.try_fold(
// Apply required transformers to the configuration
- &config_module.to_owned().transform(Required).to_result()?,
+ &config_module
+ .to_owned()
+ .transform(Required)
+ .to_result()
+ .map_err(BlueprintError::from_validation_string)?,
Blueprint::default(),
)
.and_then(|blueprint| {
let schema_builder = SchemaBuilder::from(&blueprint);
match schema_builder.finish() {
Ok(_) => Valid::succeed(blueprint),
- Err(e) => Valid::fail(e.to_string()),
+ Err(e) => Valid::fail(e.into()),
}
})
.to_result()
diff --git a/src/core/blueprint/interface_resolver.rs b/src/core/blueprint/interface_resolver.rs
index 8fca58cbc5..282ab7fa5c 100644
--- a/src/core/blueprint/interface_resolver.rs
+++ b/src/core/blueprint/interface_resolver.rs
@@ -2,6 +2,7 @@ use std::collections::BTreeSet;
use tailcall_valid::{Valid, Validator};
+use super::BlueprintError;
use crate::core::blueprint::FieldDefinition;
use crate::core::config::{ConfigModule, Discriminate, Field, Type};
use crate::core::ir::model::IR;
@@ -12,19 +13,25 @@ fn compile_interface_resolver(
interface_name: &str,
interface_types: &BTreeSet,
discriminate: &Option,
-) -> Valid<Discriminator, String> {
+) -> Valid<Discriminator, BlueprintError> {
let typename_field = discriminate.as_ref().map(|d| d.get_field());
- Discriminator::new(
+ match Discriminator::new(
interface_name.to_string(),
interface_types.clone(),
typename_field,
)
+ .to_result()
+ {
+ Ok(data) => Valid::succeed(data),
+ Err(err) => Valid::from_validation_err(BlueprintError::from_validation_string(err)),
+ }
}
pub fn update_interface_resolver<'a>(
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a Type, &'a str), FieldDefinition, String> {
- TryFold::<(&ConfigModule, &Field, &Type, &str), FieldDefinition, String>::new(
+) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a Type, &'a str), FieldDefinition, BlueprintError>
+{
+ TryFold::<(&ConfigModule, &Field, &Type, &str), FieldDefinition, BlueprintError>::new(
|(config, field, _, _), mut b_field| {
let Some(interface_types) = config.interfaces_types_map().get(field.type_of.name())
else {
diff --git a/src/core/blueprint/links.rs b/src/core/blueprint/links.rs
index bd9ad5a766..defe7a7f1e 100644
--- a/src/core/blueprint/links.rs
+++ b/src/core/blueprint/links.rs
@@ -1,19 +1,20 @@
use tailcall_valid::{Valid, ValidationError, Validator};
+use super::BlueprintError;
use crate::core::config::{Link, LinkType};
use crate::core::directive::DirectiveCodec;
pub struct Links;
impl TryFrom<Vec<Link>> for Links {
- type Error = ValidationError<String>;
+ type Error = ValidationError<BlueprintError>;
fn try_from(links: Vec<Link>) -> Result<Self, Self::Error> {
Valid::from_iter(links.iter().enumerate(), |(pos, link)| {
Valid::succeed(link.to_owned())
.and_then(|link| {
if link.src.is_empty() {
- Valid::fail("Link src cannot be empty".to_string())
+ Valid::fail(BlueprintError::LinkSrcCannotBeEmpty)
} else {
Valid::succeed(link)
}
@@ -21,7 +22,7 @@ impl TryFrom> for Links {
.and_then(|link| {
if let Some(id) = &link.id {
if links.iter().filter(|l| l.id.as_ref() == Some(id)).count() > 1 {
- return Valid::fail(format!("Duplicated id: {}", id));
+ return Valid::fail(BlueprintError::Duplicated(id.clone()));
}
}
Valid::succeed(link)
@@ -35,7 +36,7 @@ impl TryFrom> for Links {
.collect::>();
if script_links.len() > 1 {
- Valid::fail("Only one script link is allowed".to_string())
+ Valid::fail(BlueprintError::OnlyOneScriptLinkAllowed)
} else {
Valid::succeed(links)
}
@@ -47,7 +48,7 @@ impl TryFrom> for Links {
.collect::>();
if key_links.len() > 1 {
- Valid::fail("Only one key link is allowed".to_string())
+ Valid::fail(BlueprintError::OnlyOneKeyLinkAllowed)
} else {
Valid::succeed(links)
}
diff --git a/src/core/blueprint/mod.rs b/src/core/blueprint/mod.rs
index 93c97302d0..91bdc0f773 100644
--- a/src/core/blueprint/mod.rs
+++ b/src/core/blueprint/mod.rs
@@ -5,6 +5,7 @@ mod cors;
mod definitions;
mod directive;
mod dynamic_value;
+mod error;
mod from_config;
mod index;
mod interface_resolver;
@@ -16,6 +17,7 @@ mod operators;
mod schema;
mod server;
pub mod telemetry;
+mod template_validation;
mod timeout;
mod union_resolver;
mod upstream;
@@ -25,6 +27,7 @@ pub use blueprint::*;
pub use cors::*;
pub use definitions::*;
pub use dynamic_value::*;
+pub use error::*;
pub use from_config::*;
pub use index::*;
pub use links::*;
@@ -37,4 +40,4 @@ pub use upstream::*;
use crate::core::config::ConfigModule;
use crate::core::try_fold::TryFold;
-pub type TryFoldConfig<'a, A> = TryFold<'a, ConfigModule, A, String>;
+pub type TryFoldConfig<'a, A> = TryFold<'a, ConfigModule, A, BlueprintError>;
diff --git a/src/core/blueprint/mustache.rs b/src/core/blueprint/mustache.rs
index 8845aaa5ac..1718de2feb 100644
--- a/src/core/blueprint/mustache.rs
+++ b/src/core/blueprint/mustache.rs
@@ -1,7 +1,8 @@
use tailcall_valid::{Valid, Validator};
-use super::FieldDefinition;
+use super::{BlueprintError, FieldDefinition};
use crate::core::config::{self, Config};
+use crate::core::directive::DirectiveCodec;
use crate::core::ir::model::{IO, IR};
use crate::core::scalar;
@@ -16,22 +17,19 @@ impl<'a> MustachePartsValidator<'a> {
Self { type_of, config, field }
}
- fn validate_type(&self, parts: &[String], is_query: bool) -> Result<(), String> {
+ fn validate_type(&self, parts: &[String], is_query: bool) -> Result<(), BlueprintError> {
let mut len = parts.len();
let mut type_of = self.type_of;
for item in parts {
let field = type_of.fields.get(item).ok_or_else(|| {
- format!(
- "no value '{}' found",
- parts[0..parts.len() - len + 1].join(".").as_str()
- )
+ BlueprintError::NoValueFound(parts[0..parts.len() - len + 1].join("."))
})?;
let val_type = &field.type_of;
if !is_query && val_type.is_nullable() {
- return Err(format!("value '{}' is a nullable type", item.as_str()));
+ return Err(BlueprintError::ValueIsNullableType(item.clone()));
} else if len == 1 && !scalar::Scalar::is_predefined(val_type.name()) {
- return Err(format!("value '{}' is not of a scalar type", item.as_str()));
+ return Err(BlueprintError::ValueIsNotOfScalarType(item.clone()));
} else if len == 1 {
break;
}
@@ -39,7 +37,7 @@ impl<'a> MustachePartsValidator<'a> {
type_of = self
.config
.find_type(val_type.name())
- .ok_or_else(|| format!("no type '{}' found", parts.join(".").as_str()))?;
+ .ok_or_else(|| BlueprintError::NoTypeFound(parts.join(".")))?;
len -= 1;
}
@@ -47,12 +45,12 @@ impl<'a> MustachePartsValidator<'a> {
Ok(())
}
- fn validate(&self, parts: &[String], is_query: bool) -> Valid<(), String> {
+ fn validate(&self, parts: &[String], is_query: bool) -> Valid<(), BlueprintError> {
let config = self.config;
let args = &self.field.args;
if parts.len() < 2 {
- return Valid::fail("too few parts in template".to_string());
+ return Valid::fail(BlueprintError::TooFewPartsInTemplate);
}
let head = parts[0].as_str();
@@ -73,20 +71,22 @@ impl<'a> MustachePartsValidator<'a> {
// most cases
if let Some(arg) = args.iter().find(|arg| arg.name == tail) {
if !is_query && arg.of_type.is_list() {
- return Valid::fail(format!("can't use list type '{tail}' here"));
+ return Valid::fail(BlueprintError::CantUseListTypeHere(tail.to_string()));
}
// we can use non-scalar types in args
if !is_query && arg.default_value.is_none() && arg.of_type.is_nullable() {
- return Valid::fail(format!("argument '{tail}' is a nullable type"));
+ return Valid::fail(BlueprintError::ArgumentIsNullableType(
+ tail.to_string(),
+ ));
}
} else {
- return Valid::fail(format!("no argument '{tail}' found"));
+ return Valid::fail(BlueprintError::ArgumentNotFound(tail.to_string()));
}
}
"vars" => {
if !config.server.vars.iter().any(|vars| vars.key == tail) {
- return Valid::fail(format!("var '{tail}' is not set in the server config"));
+ return Valid::fail(BlueprintError::VarNotSetInServerConfig(tail.to_string()));
}
}
"headers" | "env" => {
@@ -94,49 +94,43 @@ impl<'a> MustachePartsValidator<'a> {
// we can't validate here
}
_ => {
- return Valid::fail(format!("unknown template directive '{head}'"));
+ return Valid::fail(BlueprintError::UnknownTemplateDirective(head.to_string()));
}
}
Valid::succeed(())
}
-}
-
-impl FieldDefinition {
- pub fn validate_field(&self, type_of: &config::Type, config: &Config) -> Valid<(), String> {
- // XXX we could use `Mustache`'s `render` method with a mock
- // struct implementing the `PathString` trait encapsulating `validation_map`
- // but `render` simply falls back to the default value for a given
- // type if it doesn't exist, so we wouldn't be able to get enough
- // context from that method alone
- // So we must duplicate some of that logic here :(
- let parts_validator = MustachePartsValidator::new(type_of, config, self);
- match &self.resolver {
- Some(IR::IO(IO::Http { req_template, .. })) => {
+ fn validate_resolver(&self, resolver: &IR) -> Valid<(), BlueprintError> {
+ match resolver {
+ IR::Merge(resolvers) => {
+ Valid::from_iter(resolvers, |resolver| self.validate_resolver(resolver)).unit()
+ }
+ IR::IO(IO::Http { req_template, .. }) => {
Valid::from_iter(req_template.root_url.expression_segments(), |parts| {
- parts_validator.validate(parts, false).trace("path")
+ self.validate(parts, false).trace("path")
})
.and(Valid::from_iter(req_template.query.clone(), |query| {
let mustache = &query.value;
Valid::from_iter(mustache.expression_segments(), |parts| {
- parts_validator.validate(parts, true).trace("query")
+ self.validate(parts, true).trace("query")
})
}))
.unit()
+ .trace(config::Http::trace_name().as_str())
}
- Some(IR::IO(IO::GraphQL { req_template, .. })) => {
+ IR::IO(IO::GraphQL { req_template, .. }) => {
Valid::from_iter(req_template.headers.clone(), |(_, mustache)| {
Valid::from_iter(mustache.expression_segments(), |parts| {
- parts_validator.validate(parts, true).trace("headers")
+ self.validate(parts, true).trace("headers")
})
})
.and_then(|_| {
if let Some(args) = &req_template.operation_arguments {
Valid::from_iter(args, |(_, mustache)| {
Valid::from_iter(mustache.expression_segments(), |parts| {
- parts_validator.validate(parts, true).trace("args")
+ self.validate(parts, true).trace("args")
})
})
} else {
@@ -144,15 +138,16 @@ impl FieldDefinition {
}
})
.unit()
+ .trace(config::GraphQL::trace_name().as_str())
}
- Some(IR::IO(IO::Grpc { req_template, .. })) => {
+ IR::IO(IO::Grpc { req_template, .. }) => {
Valid::from_iter(req_template.url.expression_segments(), |parts| {
- parts_validator.validate(parts, false).trace("path")
+ self.validate(parts, false).trace("path")
})
.and(
Valid::from_iter(req_template.headers.clone(), |(_, mustache)| {
Valid::from_iter(mustache.expression_segments(), |parts| {
- parts_validator.validate(parts, true).trace("headers")
+ self.validate(parts, true).trace("headers")
})
})
.unit(),
@@ -161,7 +156,7 @@ impl FieldDefinition {
if let Some(body) = &req_template.body {
if let Some(mustache) = &body.mustache {
Valid::from_iter(mustache.expression_segments(), |parts| {
- parts_validator.validate(parts, true).trace("body")
+ self.validate(parts, true).trace("body")
})
} else {
// TODO: needs review
@@ -172,12 +167,35 @@ impl FieldDefinition {
}
})
.unit()
+ .trace(config::Grpc::trace_name().as_str())
}
+ // TODO: add validation for @expr
_ => Valid::succeed(()),
}
}
}
+impl FieldDefinition {
+ pub fn validate_field(
+ &self,
+ type_of: &config::Type,
+ config: &Config,
+ ) -> Valid<(), BlueprintError> {
+ // XXX we could use `Mustache`'s `render` method with a mock
+ // struct implementing the `PathString` trait encapsulating `validation_map`
+ // but `render` simply falls back to the default value for a given
+ // type if it doesn't exist, so we wouldn't be able to get enough
+ // context from that method alone
+ // So we must duplicate some of that logic here :(
+ let parts_validator = MustachePartsValidator::new(type_of, config, self);
+
+ match &self.resolver {
+ Some(resolver) => parts_validator.validate_resolver(resolver),
+ None => Valid::succeed(()),
+ }
+ }
+}
+
#[cfg(test)]
mod test {
use tailcall_valid::Validator;
diff --git a/src/core/blueprint/operators/apollo_federation.rs b/src/core/blueprint/operators/apollo_federation.rs
index 46c4a8e770..3aaf2b953c 100644
--- a/src/core/blueprint/operators/apollo_federation.rs
+++ b/src/core/blueprint/operators/apollo_federation.rs
@@ -4,8 +4,8 @@ use std::fmt::Write;
use async_graphql::parser::types::ServiceDocument;
use tailcall_valid::{Valid, Validator};
-use super::{compile_call, compile_expr, compile_graphql, compile_grpc, compile_http, compile_js};
-use crate::core::blueprint::{Blueprint, Definition, TryFoldConfig};
+use super::{compile_resolver, CompileResolver};
+use crate::core::blueprint::{Blueprint, BlueprintError, Definition, TryFoldConfig};
use crate::core::config::{
ApolloFederation, ConfigModule, EntityResolver, Field, GraphQLOperationType, Resolver,
};
@@ -13,11 +13,11 @@ use crate::core::ir::model::IR;
use crate::core::Type;
pub struct CompileEntityResolver<'a> {
- config_module: &'a ConfigModule,
- entity_resolver: &'a EntityResolver,
+ pub config_module: &'a ConfigModule,
+ pub entity_resolver: &'a EntityResolver,
}
-pub fn compile_entity_resolver(inputs: CompileEntityResolver<'_>) -> Valid<IR, String> {
+pub fn compile_entity_resolver(inputs: CompileEntityResolver<'_>) -> Valid<IR, BlueprintError> {
let CompileEntityResolver { config_module, entity_resolver } = inputs;
let mut resolver_by_type = HashMap::new();
@@ -31,45 +31,26 @@ pub fn compile_entity_resolver(inputs: CompileEntityResolver<'_>) -> Valid compile_http(
- config_module,
- http,
- // inner resolver should resolve only single instance of type, not a list
- false,
- ),
- Resolver::Grpc(grpc) => compile_grpc(super::CompileGrpc {
- config_module,
- operation_type: &GraphQLOperationType::Query,
- field,
- grpc,
- validate_with_schema: true,
- }),
- Resolver::Graphql(graphql) => compile_graphql(
- config_module,
- &GraphQLOperationType::Query,
- type_name,
- graphql,
- ),
- Resolver::Call(call) => {
- compile_call(config_module, call, &GraphQLOperationType::Query, type_name)
- }
- Resolver::Js(js) => {
- compile_js(super::CompileJs { js, script: &config_module.extensions().script })
- }
- Resolver::Expr(expr) => {
- compile_expr(super::CompileExpr { config_module, field, expr, validate: true })
- }
Resolver::ApolloFederation(federation) => match federation {
ApolloFederation::EntityResolver(entity_resolver) => {
compile_entity_resolver(CompileEntityResolver { entity_resolver, ..inputs })
}
- ApolloFederation::Service => Valid::fail(
- "Apollo federation resolvers can't be a part of entity resolver"
- .to_string(),
- ),
+ ApolloFederation::Service => {
+ Valid::fail(BlueprintError::ApolloFederationResolversNoPartOfEntityResolver)
+ }
},
+ resolver => {
+ let inputs = CompileResolver {
+ config_module,
+ field,
+ operation_type: &GraphQLOperationType::Query,
+ object_name: type_name,
+ };
+
+ compile_resolver(&inputs, resolver).and_then(|resolver| {
+ Valid::from_option(resolver, BlueprintError::NoResolverFoundInSchema)
+ })
+ }
};
ir.map(|ir| {
@@ -80,7 +61,7 @@ pub fn compile_entity_resolver(inputs: CompileEntityResolver<'_>) -> Valid Valid {
+pub fn compile_service(mut sdl: String) -> Valid {
writeln!(sdl).ok();
// Mark subgraph as Apollo federation v2 compatible according to [docs](https://www.apollographql.com/docs/apollo-server/using-federation/apollo-subgraph-setup/#2-opt-in-to-federation-2)
@@ -111,11 +92,11 @@ pub fn update_federation<'a>() -> TryFoldConfig<'a, Blueprint> {
}
let Definition::Object(mut obj) = def else {
- return Valid::fail("Query type is not an object inside the blueprint".to_string());
+ return Valid::fail(BlueprintError::QueryTypeNotObject);
};
let Some(config_type) = config_module.types.get(&query_name) else {
- return Valid::fail(format!("Cannot find type {query_name} in the config"));
+ return Valid::fail(BlueprintError::TypeNotFoundInConfig(query_name.clone()));
};
Valid::from_iter(obj.fields.iter_mut(), |b_field| {
@@ -123,10 +104,15 @@ pub fn update_federation<'a>() -> TryFoldConfig<'a, Blueprint> {
let name = &b_field.name;
Valid::from_option(
config_type.fields.get(name),
- format!("Cannot find field {name} in the type"),
+ BlueprintError::FieldNotFoundInType(name.clone()),
)
.and_then(|field| {
- let Some(Resolver::ApolloFederation(federation)) = &field.resolver else {
+ let federation = field
+ .resolvers
+ .iter()
+ .find(|&resolver| matches!(resolver, Resolver::ApolloFederation(_)));
+
+ let Some(Resolver::ApolloFederation(federation)) = federation else {
return Valid::succeed(b_field);
};
diff --git a/src/core/blueprint/operators/call.rs b/src/core/blueprint/operators/call.rs
index 73afa763f7..8d543d8caa 100644
--- a/src/core/blueprint/operators/call.rs
+++ b/src/core/blueprint/operators/call.rs
@@ -1,35 +1,17 @@
use serde_json::Value;
-use tailcall_valid::{Valid, ValidationError, Validator};
+use tailcall_valid::{Valid, Validator};
use crate::core::blueprint::*;
use crate::core::config;
-use crate::core::config::{Field, GraphQLOperationType, Resolver};
+use crate::core::config::{Field, GraphQLOperationType};
use crate::core::ir::model::IR;
-use crate::core::try_fold::TryFold;
-
-pub fn update_call<'a>(
- operation_type: &'a GraphQLOperationType,
- object_name: &'a str,
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &str), FieldDefinition, String>::new(
- move |(config, field, _, _), b_field| {
- let Some(Resolver::Call(call)) = &field.resolver else {
- return Valid::succeed(b_field);
- };
-
- compile_call(config, call, operation_type, object_name)
- .map(|resolver| b_field.resolver(Some(resolver)))
- },
- )
-}
pub fn compile_call(
config_module: &ConfigModule,
call: &config::Call,
operation_type: &GraphQLOperationType,
object_name: &str,
-) -> Valid<IR, String> {
+) -> Valid<IR, BlueprintError> {
Valid::from_iter(call.steps.iter(), |step| {
get_field_and_field_name(step, config_module).and_then(|(field, field_name, type_of)| {
let args = step.args.iter();
@@ -47,13 +29,12 @@ pub fn compile_call(
.collect();
if empties.len().gt(&0) {
- return Valid::fail(format!(
- "no argument {} found",
+ return Valid::fail(BlueprintError::ArgumentNotFound(
empties
.into_iter()
.map(|k| format!("'{}'", k))
.collect::>()
- .join(", ")
+ .join(", "),
))
.trace(field_name.as_str());
}
@@ -68,16 +49,18 @@ pub fn compile_call(
)
.and_then(|b_field| {
if b_field.resolver.is_none() {
- Valid::fail(format!("{} field has no resolver", field_name))
+ Valid::fail(BlueprintError::FieldHasNoResolver(field_name.clone()))
} else {
Valid::succeed(b_field)
}
})
.fuse(
- Valid::from(
- DynamicValue::try_from(&Value::Object(step.args.clone().into_iter().collect()))
- .map_err(|e| ValidationError::new(e.to_string())),
- )
+ match DynamicValue::try_from(&Value::Object(
+ step.args.clone().into_iter().collect(),
+ )) {
+ Ok(value) => Valid::succeed(value),
+ Err(e) => Valid::fail(BlueprintError::Error(e)),
+ }
.map(IR::Dynamic),
)
.map(|(mut b_field, args_expr)| {
@@ -102,11 +85,11 @@ pub fn compile_call(
b_field
}),
- "Steps can't be empty".to_string(),
+ BlueprintError::StepsCanNotBeEmpty,
)
})
.and_then(|field| {
- Valid::from_option(field.resolver, "Result resolver can't be empty".to_string())
+ Valid::from_option(field.resolver, BlueprintError::ResultResolverCanNotBeEmpty)
})
}
@@ -125,20 +108,20 @@ fn get_type_and_field(call: &config::Step) -> Option<(String, String)> {
fn get_field_and_field_name<'a>(
call: &'a config::Step,
config_module: &'a ConfigModule,
-) -> Valid<(&'a Field, String, &'a config::Type), String> {
+) -> Valid<(&'a Field, String, &'a config::Type), BlueprintError> {
Valid::from_option(
get_type_and_field(call),
- "call must have query or mutation".to_string(),
+ BlueprintError::CallMustHaveQueryOrMutation,
)
.and_then(|(type_name, field_name)| {
Valid::from_option(
config_module.config().find_type(&type_name),
- format!("{} type not found on config", type_name),
+ BlueprintError::TypeNotFoundInConfig(type_name.clone()),
)
.and_then(|query_type| {
Valid::from_option(
query_type.fields.get(&field_name),
- format!("{} field not found", field_name),
+ BlueprintError::FieldNotFoundInType(field_name.clone()),
)
.fuse(Valid::succeed(field_name))
.fuse(Valid::succeed(query_type))
diff --git a/src/core/blueprint/operators/enum_alias.rs b/src/core/blueprint/operators/enum_alias.rs
index 22eebd4965..9ad1c83595 100644
--- a/src/core/blueprint/operators/enum_alias.rs
+++ b/src/core/blueprint/operators/enum_alias.rs
@@ -8,10 +8,13 @@ use crate::core::config::Field;
use crate::core::ir::model::{Map, IR};
use crate::core::try_fold::TryFold;
-pub fn update_enum_alias<'a>(
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &'a str), FieldDefinition, String>::new(
+pub fn update_enum_alias<'a>() -> TryFold<
+ 'a,
+ (&'a ConfigModule, &'a Field, &'a config::Type, &'a str),
+ FieldDefinition,
+ BlueprintError,
+> {
+ TryFold::<(&ConfigModule, &Field, &config::Type, &'a str), FieldDefinition, BlueprintError>::new(
|(config, field, _, _), mut b_field| {
let enum_type = config.enums.get(field.type_of.name());
if let Some(enum_type) = enum_type {
diff --git a/src/core/blueprint/operators/expr.rs b/src/core/blueprint/operators/expr.rs
index 42d767487c..7f0788999d 100644
--- a/src/core/blueprint/operators/expr.rs
+++ b/src/core/blueprint/operators/expr.rs
@@ -1,24 +1,23 @@
use async_graphql_value::ConstValue;
-use tailcall_valid::{Valid, ValidationError, Validator};
+use tailcall_valid::{Valid, Validator};
use crate::core::blueprint::*;
use crate::core::config;
-use crate::core::config::{Expr, Field, Resolver};
+use crate::core::config::Expr;
use crate::core::ir::model::IR;
use crate::core::ir::model::IR::Dynamic;
-use crate::core::try_fold::TryFold;
fn validate_data_with_schema(
config: &config::Config,
field: &config::Field,
gql_value: ConstValue,
-) -> Valid<(), String> {
+) -> Valid<(), BlueprintError> {
match to_json_schema(&field.type_of, config)
.validate(&gql_value)
.to_result()
{
Ok(_) => Valid::succeed(()),
- Err(err) => Valid::from_validation_err(err.transform(&(|a| a.to_owned()))),
+ Err(err) => Valid::from_validation_err(BlueprintError::from_validation_str(err)),
}
}
@@ -29,15 +28,16 @@ pub struct CompileExpr<'a> {
pub validate: bool,
}
-pub fn compile_expr(inputs: CompileExpr) -> Valid<IR, String> {
+pub fn compile_expr(inputs: CompileExpr) -> Valid<IR, BlueprintError> {
let config_module = inputs.config_module;
let field = inputs.field;
let value = &inputs.expr.body;
let validate = inputs.validate;
- Valid::from(
- DynamicValue::try_from(&value.clone()).map_err(|e| ValidationError::new(e.to_string())),
- )
+ match DynamicValue::try_from(&value.clone()) {
+ Ok(data) => Valid::succeed(data),
+ Err(err) => Valid::fail(BlueprintError::Error(err)),
+ }
.and_then(|value| {
if !value.is_const() {
// TODO: Add validation for const with Mustache here
@@ -53,23 +53,8 @@ pub fn compile_expr(inputs: CompileExpr) -> Valid {
};
validation.map(|_| Dynamic(value.to_owned()))
}
- Err(e) => Valid::fail(format!("invalid JSON: {}", e)),
+ Err(e) => Valid::fail(BlueprintError::InvalidJson(e)),
}
}
})
}
-
-pub fn update_const_field<'a>(
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &str), FieldDefinition, String>::new(
- |(config_module, field, _, _), b_field| {
- let Some(Resolver::Expr(expr)) = &field.resolver else {
- return Valid::succeed(b_field);
- };
-
- compile_expr(CompileExpr { config_module, field, expr, validate: true })
- .map(|resolver| b_field.resolver(Some(resolver)))
- },
- )
-}
diff --git a/src/core/blueprint/operators/graphql.rs b/src/core/blueprint/operators/graphql.rs
index ee25eb5fd1..4fe3189f2b 100644
--- a/src/core/blueprint/operators/graphql.rs
+++ b/src/core/blueprint/operators/graphql.rs
@@ -1,16 +1,13 @@
use std::collections::{HashMap, HashSet};
-use tailcall_valid::{Valid, ValidationError, Validator};
+use tailcall_valid::{Valid, Validator};
-use crate::core::blueprint::FieldDefinition;
-use crate::core::config::{
- Config, ConfigModule, Field, GraphQL, GraphQLOperationType, Resolver, Type,
-};
+use crate::core::blueprint::BlueprintError;
+use crate::core::config::{Config, ConfigModule, GraphQL, GraphQLOperationType};
use crate::core::graphql::RequestTemplate;
use crate::core::helpers;
use crate::core::ir::model::{IO, IR};
use crate::core::ir::RelatedFields;
-use crate::core::try_fold::TryFold;
fn create_related_fields(
config: &Config,
@@ -61,22 +58,28 @@ pub fn compile_graphql(
operation_type: &GraphQLOperationType,
type_name: &str,
graphql: &GraphQL,
-) -> Valid<IR, String> {
+) -> Valid<IR, BlueprintError> {
let args = graphql.args.as_ref();
+
+ let mustache = match helpers::headers::to_mustache_headers(&graphql.headers).to_result() {
+ Ok(mustache) => Valid::succeed(mustache),
+ Err(err) => Valid::from_validation_err(BlueprintError::from_validation_string(err)),
+ };
+
Valid::succeed(graphql.url.as_str())
- .zip(helpers::headers::to_mustache_headers(&graphql.headers))
+ .zip(mustache)
.and_then(|(base_url, headers)| {
- Valid::from(
- RequestTemplate::new(
- base_url.to_owned(),
- operation_type,
- &graphql.name,
- args,
- headers,
- create_related_fields(config, type_name, &mut HashSet::new()),
- )
- .map_err(|e| ValidationError::new(e.to_string())),
- )
+ match RequestTemplate::new(
+ base_url.to_owned(),
+ operation_type,
+ &graphql.name,
+ args,
+ headers,
+ create_related_fields(config, type_name, &mut HashSet::new()),
+ ) {
+ Ok(req_template) => Valid::succeed(req_template),
+ Err(err) => Valid::fail(BlueprintError::Error(err)),
+ }
})
.map(|req_template| {
let field_name = graphql.name.clone();
@@ -85,19 +88,3 @@ pub fn compile_graphql(
IR::IO(IO::GraphQL { req_template, field_name, batch, dl_id: None, dedupe })
})
}
-
-pub fn update_graphql<'a>(
- operation_type: &'a GraphQLOperationType,
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a Type, &'a str), FieldDefinition, String> {
- TryFold::<(&ConfigModule, &Field, &Type, &'a str), FieldDefinition, String>::new(
- |(config, field, type_of, _), b_field| {
- let Some(Resolver::Graphql(graphql)) = &field.resolver else {
- return Valid::succeed(b_field);
- };
-
- compile_graphql(config, operation_type, field.type_of.name(), graphql)
- .map(|resolver| b_field.resolver(Some(resolver)))
- .and_then(|b_field| b_field.validate_field(type_of, config).map_to(b_field))
- },
- )
-}
diff --git a/src/core/blueprint/operators/grpc.rs b/src/core/blueprint/operators/grpc.rs
index 3f730b109c..bcf7f979d6 100644
--- a/src/core/blueprint/operators/grpc.rs
+++ b/src/core/blueprint/operators/grpc.rs
@@ -5,16 +5,15 @@ use prost_reflect::FieldDescriptor;
use tailcall_valid::{Valid, ValidationError, Validator};
use super::apply_select;
-use crate::core::blueprint::FieldDefinition;
+use crate::core::blueprint::BlueprintError;
use crate::core::config::group_by::GroupBy;
-use crate::core::config::{Config, ConfigModule, Field, GraphQLOperationType, Grpc, Resolver};
+use crate::core::config::{Config, ConfigModule, Field, GraphQLOperationType, Grpc};
use crate::core::grpc::protobuf::{ProtobufOperation, ProtobufSet};
use crate::core::grpc::request_template::RequestTemplate;
+use crate::core::helpers;
use crate::core::ir::model::{IO, IR};
use crate::core::json::JsonSchema;
use crate::core::mustache::Mustache;
-use crate::core::try_fold::TryFold;
-use crate::core::{config, helpers};
fn to_url(grpc: &Grpc, method: &GrpcMethod) -> Valid<Mustache, String> {
Valid::succeed(grpc.url.as_str()).and_then(|base_url| {
@@ -64,12 +63,22 @@ fn validate_schema(
field_schema: FieldSchema,
operation: &ProtobufOperation,
name: &str,
-) -> Valid<(), String> {
+) -> Valid<(), BlueprintError> {
let input_type = &operation.input_type;
let output_type = &operation.output_type;
- Valid::from(JsonSchema::try_from(input_type))
- .zip(Valid::from(JsonSchema::try_from(output_type)))
+ let input_type = match JsonSchema::try_from(input_type) {
+ Ok(input_schema) => Valid::succeed(input_schema),
+ Err(e) => Valid::from_validation_err(BlueprintError::from_validation_string(e)),
+ };
+
+ let output_type = match JsonSchema::try_from(output_type) {
+ Ok(output_type) => Valid::succeed(output_type),
+ Err(e) => Valid::from_validation_err(BlueprintError::from_validation_string(e)),
+ };
+
+ input_type
+ .zip(output_type)
.and_then(|(_input_schema, sub_type)| {
// TODO: add validation for input schema - should compare result grpc.body to
// schema
@@ -77,7 +86,10 @@ fn validate_schema(
// TODO: all of the fields in protobuf are optional actually
// and if we want to mark some fields as required in GraphQL
// JsonSchema won't match and the validation will fail
- sub_type.is_a(&super_type, name)
+ match sub_type.is_a(&super_type, name).to_result() {
+ Ok(res) => Valid::succeed(res),
+ Err(e) => Valid::from_validation_err(BlueprintError::from_validation_string(e)),
+ }
})
}
@@ -85,20 +97,30 @@ fn validate_group_by(
field_schema: &FieldSchema,
operation: &ProtobufOperation,
group_by: Vec<String>,
-) -> Valid<(), String> {
+) -> Valid<(), BlueprintError> {
let input_type = &operation.input_type;
let output_type = &operation.output_type;
- let mut field_descriptor: Result<FieldDescriptor, ValidationError<String>> = None.ok_or(
- ValidationError::new(format!("field {} not found", group_by[0])),
- );
+ let mut field_descriptor: Result<FieldDescriptor, ValidationError<BlueprintError>> = None
+ .ok_or(ValidationError::new(BlueprintError::FieldNotFound(
+ group_by[0].clone(),
+ )));
for item in group_by.iter().take(&group_by.len() - 1) {
- field_descriptor = output_type
- .get_field_by_json_name(item.as_str())
- .ok_or(ValidationError::new(format!("field {} not found", item)));
+ field_descriptor =
+ output_type
+ .get_field_by_json_name(item.as_str())
+ .ok_or(ValidationError::new(BlueprintError::FieldNotFound(
+ item.clone(),
+ )));
}
- let output_type = field_descriptor.and_then(|f| JsonSchema::try_from(&f));
+ let output_type = field_descriptor
+ .and_then(|f| JsonSchema::try_from(&f).map_err(BlueprintError::from_validation_string));
- Valid::from(JsonSchema::try_from(input_type))
+ let json_schema = match JsonSchema::try_from(input_type) {
+ Ok(schema) => Valid::succeed(schema),
+ Err(e) => Valid::from_validation_err(BlueprintError::from_validation_string(e)),
+ };
+
+ json_schema
.zip(Valid::from(output_type))
.and_then(|(_input_schema, output_schema)| {
// TODO: add validation for input schema - should compare result grpc.body to
@@ -106,7 +128,13 @@ fn validate_group_by(
let fields = &field_schema.field;
// we're treating List types for gRPC as optional.
let fields = JsonSchema::Opt(Box::new(JsonSchema::Arr(Box::new(fields.to_owned()))));
- fields.is_a(&output_schema, group_by[0].as_str())
+ match fields
+ .is_a(&output_schema, group_by[0].as_str())
+ .to_result()
+ {
+ Ok(res) => Valid::succeed(res),
+ Err(e) => Valid::from_validation_err(BlueprintError::from_validation_string(e)),
+ }
})
}
@@ -130,7 +158,7 @@ impl Display for GrpcMethod {
}
impl TryFrom<&str> for GrpcMethod {
- type Error = ValidationError<String>;
+ type Error = ValidationError<BlueprintError>;
 fn try_from(value: &str) -> Result<Self, Self::Error> {
let parts: Vec<&str> = value.rsplitn(3, '.').collect();
@@ -143,15 +171,14 @@ impl TryFrom<&str> for GrpcMethod {
};
Ok(method)
}
- _ => Err(ValidationError::new(format!(
- "Invalid method format: {}. Expected format is <package>.<service>.<method>",
- value
- ))),
+ _ => Err(ValidationError::new(
+ BlueprintError::InvalidGrpcMethodFormat(value.to_string()),
+ )),
}
}
}
-pub fn compile_grpc(inputs: CompileGrpc) -> Valid<IR, String> {
+pub fn compile_grpc(inputs: CompileGrpc) -> Valid<IR, BlueprintError> {
let config_module = inputs.config_module;
let operation_type = inputs.operation_type;
let field = inputs.field;
@@ -164,14 +191,18 @@ pub fn compile_grpc(inputs: CompileGrpc) -> Valid {
let file_descriptor_set = config_module.extensions().get_file_descriptor_set();
if file_descriptor_set.file.is_empty() {
- return Valid::fail("Protobuf files were not specified in the config".to_string());
+ return Valid::fail(BlueprintError::ProtobufFilesNotSpecifiedInConfig);
}
- to_operation(&method, file_descriptor_set)
+ match to_operation(&method, file_descriptor_set)
.fuse(to_url(grpc, &method))
.fuse(helpers::headers::to_mustache_headers(&grpc.headers))
.fuse(helpers::body::to_body(grpc.body.as_ref()))
- .into()
+ .to_result()
+ {
+ Ok(data) => Valid::succeed(data),
+ Err(e) => Valid::from_validation_err(BlueprintError::from_validation_string(e)),
+ }
})
.and_then(|(operation, url, headers, body)| {
let validation = if validate_with_schema {
@@ -210,33 +241,6 @@ pub fn compile_grpc(inputs: CompileGrpc) -> Valid {
.and_then(apply_select)
}
-pub fn update_grpc<'a>(
- operation_type: &'a GraphQLOperationType,
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &'a str), FieldDefinition, String>::new(
- |(config_module, field, type_of, _name), b_field| {
- let Some(Resolver::Grpc(grpc)) = &field.resolver else {
- return Valid::succeed(b_field);
- };
-
- compile_grpc(CompileGrpc {
- config_module,
- operation_type,
- field,
- grpc,
- validate_with_schema: true,
- })
- .map(|resolver| b_field.resolver(Some(resolver)))
- .and_then(|b_field| {
- b_field
- .validate_field(type_of, config_module)
- .map_to(b_field)
- })
- },
- )
-}
-
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
@@ -244,6 +248,7 @@ mod tests {
use tailcall_valid::ValidationError;
use super::GrpcMethod;
+ use crate::core::blueprint::BlueprintError;
#[test]
fn try_from_grpc_method() {
@@ -266,7 +271,9 @@ mod tests {
assert!(result.is_err());
assert_eq!(
result.err().unwrap(),
- ValidationError::new("Invalid method format: package_name.ServiceName. Expected format is <package>.<service>.<method>".to_string())
+ ValidationError::new(BlueprintError::InvalidGrpcMethodFormat(
+ "package_name.ServiceName".to_string()
+ ))
);
}
}
diff --git a/src/core/blueprint/operators/http.rs b/src/core/blueprint/operators/http.rs
index 6518388845..4b9b9a30d1 100644
--- a/src/core/blueprint/operators/http.rs
+++ b/src/core/blueprint/operators/http.rs
@@ -1,35 +1,44 @@
-use tailcall_valid::{Valid, ValidationError, Validator};
+use tailcall_valid::{Valid, Validator};
+use template_validation::validate_argument;
use crate::core::blueprint::*;
use crate::core::config::group_by::GroupBy;
-use crate::core::config::{Field, Resolver};
+use crate::core::config::Field;
use crate::core::endpoint::Endpoint;
use crate::core::http::{HttpFilter, Method, RequestTemplate};
use crate::core::ir::model::{IO, IR};
-use crate::core::try_fold::TryFold;
use crate::core::{config, helpers, Mustache};
pub fn compile_http(
config_module: &config::ConfigModule,
http: &config::Http,
- is_list: bool,
-) -> Valid<IR, String> {
+ field: &Field,
+) -> Valid<IR, BlueprintError> {
+ let is_list = field.type_of.is_list();
let dedupe = http.dedupe.unwrap_or_default();
+ let mustache_headers = match helpers::headers::to_mustache_headers(&http.headers).to_result() {
+ Ok(mustache_headers) => Valid::succeed(mustache_headers),
+ Err(e) => Valid::from_validation_err(BlueprintError::from_validation_string(e)),
+ };
- Valid::<(), String>::fail("GroupBy is only supported for GET requests".to_string())
+ Valid::<(), BlueprintError>::fail(BlueprintError::GroupByOnlyForGet)
.when(|| !http.batch_key.is_empty() && http.method != Method::GET)
.and(
- Valid::<(), String>::fail(
- "Batching capability was used without enabling it in upstream".to_string(),
- )
- .when(|| {
+ Valid::<(), BlueprintError>::fail(BlueprintError::IncorrectBatchingUsage).when(|| {
(config_module.upstream.get_delay() < 1
|| config_module.upstream.get_max_size() < 1)
&& !http.batch_key.is_empty()
}),
)
+ .and(
+ Valid::from_iter(http.query.iter(), |query| {
+ validate_argument(config_module, Mustache::parse(query.value.as_str()), field)
+ })
+ .unit()
+ .trace("query"),
+ )
.and(Valid::succeed(http.url.as_str()))
- .zip(helpers::headers::to_mustache_headers(&http.headers))
+ .zip(mustache_headers)
.and_then(|(base_url, headers)| {
let query = http
.query
@@ -44,7 +53,7 @@ pub fn compile_http(
})
.collect();
- RequestTemplate::try_from(
+ match RequestTemplate::try_from(
Endpoint::new(base_url.to_string())
.method(http.method.clone())
.query(query)
@@ -52,8 +61,10 @@ pub fn compile_http(
.encoding(http.encoding.clone()),
)
.map(|req_tmpl| req_tmpl.headers(headers))
- .map_err(|e| ValidationError::new(e.to_string()))
- .into()
+ {
+ Ok(data) => Valid::succeed(data),
+ Err(e) => Valid::fail(BlueprintError::Error(e)),
+ }
})
.map(|req_template| {
// marge http and upstream on_request
@@ -92,23 +103,3 @@ pub fn compile_http(
})
.and_then(apply_select)
}
-
-pub fn update_http<'a>(
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &'a str), FieldDefinition, String>::new(
- |(config_module, field, type_of, _), b_field| {
- let Some(Resolver::Http(http)) = &field.resolver else {
- return Valid::succeed(b_field);
- };
-
- compile_http(config_module, http, field.type_of.is_list())
- .map(|resolver| b_field.resolver(Some(resolver)))
- .and_then(|b_field| {
- b_field
- .validate_field(type_of, config_module)
- .map_to(b_field)
- })
- },
- )
-}
diff --git a/src/core/blueprint/operators/js.rs b/src/core/blueprint/operators/js.rs
index 2eb35ee423..034f47fd84 100644
--- a/src/core/blueprint/operators/js.rs
+++ b/src/core/blueprint/operators/js.rs
@@ -1,33 +1,16 @@
use tailcall_valid::{Valid, Validator};
-use crate::core::blueprint::FieldDefinition;
-use crate::core::config;
-use crate::core::config::{ConfigModule, Field, Resolver, JS};
+use crate::core::blueprint::BlueprintError;
+use crate::core::config::JS;
use crate::core::ir::model::{IO, IR};
-use crate::core::try_fold::TryFold;
pub struct CompileJs<'a> {
pub js: &'a JS,
pub script: &'a Option<String>,
}
-pub fn compile_js(inputs: CompileJs) -> Valid<IR, String> {
+pub fn compile_js(inputs: CompileJs) -> Valid<IR, BlueprintError> {
let name = &inputs.js.name;
- Valid::from_option(inputs.script.as_ref(), "script is required".to_string())
+ Valid::from_option(inputs.script.as_ref(), BlueprintError::ScriptIsRequired)
.map(|_| IR::IO(IO::Js { name: name.to_string() }))
}
-
-pub fn update_js_field<'a>(
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &str), FieldDefinition, String>::new(
- |(module, field, _, _), b_field| {
- let Some(Resolver::Js(js)) = &field.resolver else {
- return Valid::succeed(b_field);
- };
-
- compile_js(CompileJs { script: &module.extensions().script, js })
- .map(|resolver| b_field.resolver(Some(resolver)))
- },
- )
-}
diff --git a/src/core/blueprint/operators/mod.rs b/src/core/blueprint/operators/mod.rs
index 77947e9571..0548e74111 100644
--- a/src/core/blueprint/operators/mod.rs
+++ b/src/core/blueprint/operators/mod.rs
@@ -8,6 +8,7 @@ mod http;
mod js;
mod modify;
mod protected;
+mod resolver;
mod select;
pub use apollo_federation::*;
@@ -20,4 +21,5 @@ pub use http::*;
pub use js::*;
pub use modify::*;
pub use protected::*;
+pub use resolver::*;
pub use select::*;
diff --git a/src/core/blueprint/operators/modify.rs b/src/core/blueprint/operators/modify.rs
index add132345a..29ca3c64f2 100644
--- a/src/core/blueprint/operators/modify.rs
+++ b/src/core/blueprint/operators/modify.rs
@@ -6,10 +6,13 @@ use crate::core::config::Field;
use crate::core::ir::model::IR;
use crate::core::try_fold::TryFold;
-pub fn update_modify<'a>(
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &'a str), FieldDefinition, String>::new(
+pub fn update_modify<'a>() -> TryFold<
+ 'a,
+ (&'a ConfigModule, &'a Field, &'a config::Type, &'a str),
+ FieldDefinition,
+ BlueprintError,
+> {
+ TryFold::<(&ConfigModule, &Field, &config::Type, &'a str), FieldDefinition, BlueprintError>::new(
|(config, field, type_of, _), mut b_field| {
if let Some(modify) = field.modify.as_ref() {
if let Some(new_name) = &modify.name {
@@ -17,9 +20,7 @@ pub fn update_modify<'a>(
let interface = config.find_type(name);
if let Some(interface) = interface {
if interface.fields.iter().any(|(name, _)| name == new_name) {
- return Valid::fail(
- "Field is already implemented from interface".to_string(),
- );
+ return Valid::fail(BlueprintError::FieldExistsInInterface);
}
}
}
diff --git a/src/core/blueprint/operators/protected.rs b/src/core/blueprint/operators/protected.rs
index ee8e3f8c93..6f80a442d4 100644
--- a/src/core/blueprint/operators/protected.rs
+++ b/src/core/blueprint/operators/protected.rs
@@ -1,15 +1,19 @@
use tailcall_valid::{Valid, Validator};
-use crate::core::blueprint::{Auth, FieldDefinition, Provider};
+use crate::core::blueprint::{Auth, BlueprintError, FieldDefinition, Provider};
use crate::core::config::{self, ConfigModule, Field};
use crate::core::ir::model::IR;
use crate::core::try_fold::TryFold;
pub fn update_protected<'a>(
type_name: &'a str,
-) -> TryFold<'a, (&'a ConfigModule, &'a Field, &'a config::Type, &'a str), FieldDefinition, String>
-{
- TryFold::<(&ConfigModule, &Field, &config::Type, &'a str), FieldDefinition, String>::new(
+) -> TryFold<
+ 'a,
+ (&'a ConfigModule, &'a Field, &'a config::Type, &'a str),
+ FieldDefinition,
+ BlueprintError,
+> {
+ TryFold::<(&ConfigModule, &Field, &config::Type, &'a str), FieldDefinition, BlueprintError>::new(
|(config, field, type_, _), mut b_field| {
if field.protected.is_some() // check the field itself has marked as protected
|| type_.protected.is_some() // check the type that contains current field
@@ -19,13 +23,11 @@ pub fn update_protected<'a>(
.is_some()
{
if config.input_types().contains(type_name) {
- return Valid::fail("Input types can not be protected".to_owned());
+ return Valid::fail(BlueprintError::InputTypesCannotBeProtected);
}
if !config.extensions().has_auth() {
- return Valid::fail(
- "@protected operator is used but there is no @link definitions for auth providers".to_owned(),
- );
+ return Valid::fail(BlueprintError::ProtectedOperatorNoAuthProviders);
}
// Used to collect the providers that are used in the field
@@ -58,7 +60,7 @@ pub fn update_protected<'a>(
if let Some(provider) = providers.get(id) {
Valid::succeed(Auth::Provider(provider.clone()))
} else {
- Valid::fail(format!("Auth provider {} not found", id))
+ Valid::fail(BlueprintError::AuthProviderNotFound(id.clone()))
}
})
.map(|provider| {
diff --git a/src/core/blueprint/operators/resolver.rs b/src/core/blueprint/operators/resolver.rs
new file mode 100644
index 0000000000..6d296db87b
--- /dev/null
+++ b/src/core/blueprint/operators/resolver.rs
@@ -0,0 +1,93 @@
+use tailcall_valid::{Valid, Validator};
+
+use super::{compile_call, compile_expr, compile_graphql, compile_grpc, compile_http, compile_js};
+use crate::core::blueprint::{BlueprintError, FieldDefinition};
+use crate::core::config::{self, ConfigModule, Field, GraphQLOperationType, Resolver};
+use crate::core::directive::DirectiveCodec;
+use crate::core::ir::model::IR;
+use crate::core::try_fold::TryFold;
+
+pub struct CompileResolver<'a> {
+ pub config_module: &'a ConfigModule,
+ pub field: &'a Field,
+ pub operation_type: &'a GraphQLOperationType,
+ pub object_name: &'a str,
+}
+
+pub fn compile_resolver(
+ inputs: &CompileResolver,
+ resolver: &Resolver,
+) -> Valid