commit 94d689fa820f3c7ecafc59c580106c43684644be Author: Magnus Hallin Date: Sun Sep 11 18:41:29 2016 +0200 Initial import diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..a9d37c56 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +target +Cargo.lock diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..764b6c86 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,15 @@ +language: rust + +rust: + - stable + - beta + - nightly +matrix: + allow_failures: + - rust: nightly + +script: + cargo build --verbose + cargo build --features iron-handlers --verbose + + cargo test --verbose --features iron-handlers diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000..ca02635c --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "juniper" +version = "0.1.0" +authors = ["Magnus Hallin "] + +[features] +default = [] +nightly = [] +iron-handlers = ["iron"] + +[dependencies] +rustc-serialize = "0.3.19" +iron = { version = "^0.4.0", optional = true } + +[dev-dependencies] +iron = "^0.4.0" +router = "^0.2.0" +mount = "^0.2.1" +logger = "^0.1.0" +iron-test = "^0.4.0" diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..0ccd1e17 --- /dev/null +++ b/LICENSE @@ -0,0 +1,25 @@ +BSD 2-Clause License + +Copyright (c) 2016, Magnus Hallin +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 00000000..dd793bdd --- /dev/null +++ b/README.md @@ -0,0 +1,143 @@ +# Juniper + +> GraphQL server library for Rust + +--- + +[GraphQL][graphql] is a data query language developed by Facebook intended to +serve mobile and web application frontends. Juniper makes it possible to write +GraphQL servers in Rust that are type-safe and blazingly fast. + +Juniper does not include a web server - instead it provides building blocks to +make integration with existing servers straightforward. It optionally provides a +pre-built integration for the [Iron framework][iron]. 
+ +## Installation + +Add Juniper to your Cargo.toml: + +```toml +[dependencies] +juniper = "0.5.0" +``` + +If you want the Iron integration enabled, you need to enable the `iron-handlers` +feature flag: + +```toml +[dependencies] +juniper = { version = "0.5.0", features = ["iron-handlers"] } +``` + +## Building schemas + +GraphQL turns the REST paradigm as it's usually implemented on its head: instead +of providing a fixed structure of all types and relations in the system, GraphQL +defines a _schema_ which your users can query. The schema defines all types, +fields, and relations available, while the query defines which fields and +relations a user is interested in. + +Juniper expects you to already have the types you want to expose in GraphQL as +Rust data types. Other than that, it doesn't make any assumptions whether they +are stored in a database or just in memory. Exposing a type is a matter of +implementing the `GraphQLType` for your type. To make things a bit easier, +Juniper comes with a set of macros that help you do this, based on what kind of +type you want to expose. Let's look at how one could expose parts of the [Star +Wars Schema][swschema]: + +```rust +#[macro_use] extern crate juniper; + +use juniper::FieldResult; + +enum Episode { + NewHope, + Empire, + Jedi, +} + +struct Human { + id: String, + name: String, + appears_in: Vec, + home_planet: String, +} + +graphql_enum!(Episode { + Episode::NewHope => "NEW_HOPE", + Episode::Empire => "EMPIRE", + Episode::Jedi => "JEDI", +}); + +graphql_object!(Human: () as "Human" |&self| { + description: "A humanoid creature in the Star Wars universe" + + // Field resolver methods look almost like ordinary methods. The macro picks + // up arguments and return types for the introspection schema, and verifies + // it during compilation. + field id() -> FieldResult<&String> { + Ok(&self.id) + } + + field name() -> FieldResult<&String> { + Ok(&self.name) + } + + field appears_in() -> FieldResult<&Vec> { + Ok(&self.appears_in) + } + + field home_planet() -> FieldResult<&String> { + Ok(&self.home_planet) + } +}); +``` + +You can find the full example in [src/tests/schema.rs][test_schema_rs], +including polymorphism with traits and interfaces. For an example of the Iron +integration, see the [examples folder][examples]. + +## Features + +Juniper supports the full GraphQL query language according to the +[specification][graphql_spec], including the introspective schema and all +validations. It does not, however, support the schema language. + +As an exception to other GraphQL libraries for other languages, Juniper builds +non-null types by default. A field of type `Vec` will be converted into +`[Episode!]!`. The corresponding Rust type for e.g. `[Episode]` would be +`Option>>`. + +## API Stability + +Juniper has not reached 1.0 yet, thus some API instability should be expected. + +## 1.0 Roadmap + +The road to 1.0 _focuses_ on two aspects: making sure the API hasn't got any +obvious dead-ends with respect to probable future features, and improving test +coverage for general execution. There are some chores that need to be completed +as well. 
+ +* [ ] Extensive execution testing + * [ ] Sending input objects and partial input objects in variables + * [ ] Sending enums in variables + * [ ] General input value type checking and validation +* [ ] Improve helper macros + * [ ] `graphql_union!` helper completely missing + * [ ] Add support for deprecating things + * [ ] Custom enum values and descriptions + * [ ] Improved syntax for fields that can't fail resolution - make + `FieldResult` optional maybe? +* [ ] Investigate asynchronous execution - implementing it is not necessary, but + at least look at what API changes will be needed for us to hook into + [Tokio][tokio], for example. +* [ ] Larger examples to illustrate things like database access + +[graphql]: http://graphql.org +[iron]: http://ironframework.io +[swschema]: http://graphql.org/docs/typesystem/ +[graphql_spec]: http://facebook.github.io/graphql +[test_schema_rs]: src/tests/schema.rs +[tokio]: https://github.com/tokio-rs/tokio +[examples]: examples/ diff --git a/examples/server.rs b/examples/server.rs new file mode 100644 index 00000000..53f1bc5c --- /dev/null +++ b/examples/server.rs @@ -0,0 +1,56 @@ +extern crate iron; +extern crate mount; +extern crate logger; +extern crate rustc_serialize; +#[macro_use] extern crate juniper; + +use mount::Mount; +use logger::Logger; +use iron::prelude::*; +use juniper::FieldResult; +use juniper::iron_handlers::{GraphQLHandler, GraphiQLHandler}; + +fn context_factory(_: &mut Request) -> () { + () +} + +fn main() { + let mut mount = Mount::new(); + + let graphql_endpoint = GraphQLHandler::new(context_factory, Query { }, Mutation { }); + let graphiql_endpoint = GraphiQLHandler::new("/graphql"); + + mount.mount("/graphiql", graphiql_endpoint); + mount.mount("/graphql", graphql_endpoint); + + let (logger_before, logger_after) = Logger::new(None); + + let mut chain = Chain::new(mount); + chain.link_before(logger_before); + chain.link_after(logger_after); + + let host = "localhost:8080"; + println!("GraphQL server started on {}", host); + Iron::new(chain).http(host).unwrap(); +} + +struct Query {} +struct Mutation {} + +graphql_object!(Query: () as "Query" |&self| { + field dummy() -> FieldResult<&str> { + Ok("Dummy field") + } + + field error() -> FieldResult<&str> { + Err("Can't do it".to_owned()) + } +}); + +graphql_object!( Mutation: CtxT as "Mutation" |&self| { + field print(value: String) -> FieldResult { + println!("Printing text according to mutation"); + println!("{}", value); + Ok(value) + } +}); diff --git a/src/ast.rs b/src/ast.rs new file mode 100644 index 00000000..e681e40e --- /dev/null +++ b/src/ast.rs @@ -0,0 +1,445 @@ +use std::fmt; +use std::collections::HashMap; +use std::hash::Hash; +use std::vec; +use std::slice; + +use rustc_serialize::json::{ToJson, Json}; + +use parser::Spanning; + +/// A type literal in the syntax tree +/// +/// This enum carries no semantic information and might refer to types that do +/// not exist. +#[derive(Clone, Eq, PartialEq, Debug)] +pub enum Type { + /// A nullable named type, e.g. `String` + Named(String), + /// A nullable list type, e.g. `[String]` + /// + /// The list itself is what's nullable, the containing type might be non-null. + List(Box), + /// A non-null named type, e.g. `String!` + NonNullNamed(String), + /// A non-null list type, e.g. `[String]!`. + /// + /// The list itself is what's non-null, the containing type might be null. 
+ NonNullList(Box), +} + +/// A JSON-like value that can be passed into the query execution, either +/// out-of-band, or in-band as default variable values. These are _not_ constant +/// and might contain variables. +/// +/// Lists and objects variants are _spanned_, i.e. they contain a reference to +/// their position in the source file, if available. +#[derive(Clone, PartialEq, Debug)] +#[allow(missing_docs)] +pub enum InputValue { + Null, + Int(i64), + Float(f64), + String(String), + Boolean(bool), + Enum(String), + Variable(String), + List(Vec>), + Object(Vec<(Spanning, Spanning)>), +} + +#[derive(Clone, PartialEq, Debug)] +pub struct VariableDefinition { + pub var_type: Spanning, + pub default_value: Option>, +} + +#[derive(Clone, PartialEq, Debug)] +pub struct Arguments { + pub items: Vec<(Spanning, Spanning)>, +} + +#[derive(Clone, PartialEq, Debug)] +pub struct VariableDefinitions { + pub items: Vec<(Spanning, VariableDefinition)>, +} + +#[derive(Clone, PartialEq, Debug)] +pub struct Field { + pub alias: Option>, + pub name: Spanning, + pub arguments: Option>, + pub directives: Option>>, + pub selection_set: Option>, +} + +#[derive(Clone, PartialEq, Debug)] +pub struct FragmentSpread { + pub name: Spanning, + pub directives: Option>>, +} + +#[derive(Clone, PartialEq, Debug)] +pub struct InlineFragment { + pub type_condition: Option>, + pub directives: Option>>, + pub selection_set: Vec, +} + +/// Entry in a GraphQL selection set +/// +/// This enum represents one of the three variants of a selection that exists +/// in GraphQL: a field, a fragment spread, or an inline fragment. Each of the +/// variants references their location in the query source. +/// +/// ```text +/// { +/// field(withArg: 123) { subField } +/// ...fragmentSpread +/// ...on User { +/// inlineFragmentField +/// } +/// } +/// ``` +#[derive(Clone, PartialEq, Debug)] +#[allow(missing_docs)] +pub enum Selection { + Field(Spanning), + FragmentSpread(Spanning), + InlineFragment(Spanning), +} + +#[derive(Clone, PartialEq, Debug)] +pub struct Directive { + pub name: Spanning, + pub arguments: Option>, +} + +#[derive(Clone, PartialEq, Debug)] +pub enum OperationType { + Query, + Mutation, +} + +#[derive(Clone, PartialEq, Debug)] +pub struct Operation { + pub operation_type: OperationType, + pub name: Option>, + pub variable_definitions: Option>, + pub directives: Option>>, + pub selection_set: Vec, +} + +#[derive(Clone, PartialEq, Debug)] +pub struct Fragment { + pub name: Spanning, + pub type_condition: Spanning, + pub directives: Option>>, + pub selection_set: Vec, +} + +#[derive(Clone, PartialEq, Debug)] +pub enum Definition { + Operation(Spanning), + Fragment(Spanning), +} + +pub type Document = Vec; + +/// Parse an unstructured input value into a Rust data type. +/// +/// The conversion _can_ fail, and must in that case return None. Implemented +/// automatically by the convenience macros `graphql_enum!` and +/// `graphql_scalar!`. Must be implemented manually when manually exposing new +/// enums or scalars. +pub trait FromInputValue: Sized { + /// Performs the conversion. + fn from(v: &InputValue) -> Option; +} + +/// Losslessly clones a Rust data type into an InputValue. +pub trait ToInputValue: Sized { + /// Performs the conversion. + fn to(&self) -> InputValue; +} + +impl Type { + /// Get the name of a named type. + /// + /// Only applies to named types; lists will return `None`. 
+ pub fn name(&self) -> Option<&str> { + match *self { + Type::Named(ref n) | Type::NonNullNamed(ref n) => Some(n), + _ => None + } + } + + /// Get the innermost name by unpacking lists + /// + /// All type literals contain exactly one named type. + pub fn innermost_name(&self) -> &str { + match *self { + Type::Named(ref n) | Type::NonNullNamed(ref n) => n, + Type::List(ref l) | Type::NonNullList(ref l) => l.innermost_name(), + } + } + + /// Determines if a type only can represent non-null values. + pub fn is_non_null(&self) -> bool { + match *self { + Type::NonNullNamed(_) | Type::NonNullList(_) => true, + _ => false, + } + } +} + +impl fmt::Display for Type { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Type::Named(ref n) => write!(f, "{}", n), + Type::NonNullNamed(ref n) => write!(f, "{}!", n), + Type::List(ref t) => write!(f, "[{}]", t), + Type::NonNullList(ref t) => write!(f, "[{}]!", t), + } + } +} + +impl InputValue { + /// Construct a null value. + pub fn null() -> InputValue { InputValue::Null } + + /// Construct an integer value. + pub fn int(i: i64) -> InputValue { InputValue::Int(i) } + + /// Construct a floating point value. + pub fn float(f: f64) -> InputValue { InputValue::Float(f) } + + /// Construct a boolean value. + pub fn boolean(b: bool) -> InputValue { InputValue::Boolean(b) } + + /// Construct a string value. + pub fn string>(s: T) -> InputValue { + InputValue::String(s.as_ref().to_owned()) + } + + /// Construct an enum value. + pub fn enum_value>(s: T) -> InputValue { + InputValue::Enum(s.as_ref().to_owned()) + } + + /// Construct a variable value. + pub fn variable>(v: T) -> InputValue { + InputValue::Variable(v.as_ref().to_owned()) + } + + /// Construct an unlocated list. + /// + /// Convenience function to make each `InputValue` in the input vector + /// not contain any location information. Can be used from `ToInputValue` + /// implementations, where no source code position information is available. + pub fn list(l: Vec) -> InputValue { + InputValue::List(l.into_iter().map(|i| Spanning::unlocated(i)).collect()) + } + + /// Construct a located list. + pub fn parsed_list(l: Vec>) -> InputValue { + InputValue::List(l) + } + + /// Construct an unlocated object. + /// + /// Similar to `InputValue::list`, it makes each key and value in the given + /// hash map not contain any location information. + pub fn object(o: HashMap) -> InputValue + where K: AsRef + Eq + Hash + { + InputValue::Object( + o.into_iter() + .map(|(k, v)| + (Spanning::unlocated(k.as_ref().to_owned()), Spanning::unlocated(v))) + .collect() + ) + } + + /// Construct a located object. + pub fn parsed_object(o: Vec<(Spanning, Spanning)>) -> InputValue { + InputValue::Object(o) + } + + /// Convert a `Json` structure into an `InputValue`. + /// + /// This consumes the JSON instance. + /// + /// Notes: + /// * No enums or variables will be produced by this method. + /// * All lists and objects will be unlocated + pub fn from_json(json: Json) -> InputValue { + match json { + Json::I64(i) => InputValue::int(i), + Json::U64(u) => InputValue::float(u as f64), + Json::F64(f) => InputValue::float(f), + Json::String(s) => InputValue::string(s), + Json::Boolean(b) => InputValue::boolean(b), + Json::Array(a) => InputValue::list(a.into_iter().map(InputValue::from_json).collect()), + Json::Object(o) => InputValue::object(o.into_iter().map(|(k,v)| (k, InputValue::from_json(v))).collect()), + Json::Null => InputValue::null(), + } + } + + /// Resolve all variables to their values. 
+ pub fn into_const(self, vars: &HashMap) -> InputValue { + match self { + InputValue::Variable(v) => vars[&v].clone(), + InputValue::List(l) => InputValue::List( + l.into_iter().map(|s| s.map(|v| v.into_const(vars))).collect() + ), + InputValue::Object(o) => InputValue::Object( + o.into_iter().map(|(sk, sv)| (sk, sv.map(|v| v.into_const(vars)))).collect() + ), + v => v, + } + } + + /// Shorthand form of invoking `FromInputValue::from()`. + pub fn convert(&self) -> Option where T: FromInputValue { + ::from(self) + } + + /// Does the value represent null? + pub fn is_null(&self) -> bool { + match *self { + InputValue::Null => true, + _ => false, + } + } + + /// Does the value represent a variable? + pub fn is_variable(&self) -> bool { + match *self { + InputValue::Variable(_) => true, + _ => false, + } + } + + /// View the underlying enum value, if present. + pub fn as_enum_value(&self) -> Option<&str> { + match *self { + InputValue::Enum(ref e) => Some(e), + _ => None, + } + } + + /// View the underlying string value, if present. + pub fn as_string_value(&self) -> Option<&str> { + match *self { + InputValue::String(ref s) => Some(s), + _ => None, + } + } + + /// Convert the input value to an unlocated object value. + /// + /// This constructs a new hashmap that contain references to the keys + /// and values in `self`. + pub fn to_object_value(&self) -> Option> { + match *self { + InputValue::Object(ref o) => Some( + o.iter().map(|&(ref sk, ref sv)| (sk.item.as_str(), &sv.item)).collect()), + _ => None, + } + } + + /// Convert the input value to an unlocated list value. + /// + /// This constructs a new vector that contain references to the values + /// in `self`. + pub fn to_list_value(&self) -> Option> { + match *self { + InputValue::List(ref l) => Some(l.iter().map(|s| &s.item).collect()), + _ => None, + } + } + + /// Recursively find all variables + pub fn referenced_variables(&self) -> Vec<&str> { + match *self { + InputValue::Variable(ref name) => vec![name], + InputValue::List(ref l) => l.iter().flat_map(|v| v.item.referenced_variables()).collect(), + InputValue::Object(ref obj) => obj.iter().flat_map(|&(_, ref v)| v.item.referenced_variables()).collect(), + _ => vec![], + } + } + + /// Compare equality with another `InputValue` ignoring any source position information. 
+ pub fn unlocated_eq(&self, other: &InputValue) -> bool { + use InputValue::*; + + match (self, other) { + (&Null, &Null) => true, + (&Int(i1), &Int(i2)) => i1 == i2, + (&Float(f1), &Float(f2)) => f1 == f2, + (&String(ref s1), &String(ref s2)) | + (&Enum(ref s1), &Enum(ref s2)) | + (&Variable(ref s1), &Variable(ref s2)) => s1 == s2, + (&Boolean(b1), &Boolean(b2)) => b1 == b2, + (&List(ref l1), &List(ref l2)) => + l1.iter().zip(l2.iter()).all(|(ref v1, ref v2)| v1.item.unlocated_eq(&v2.item)), + (&Object(ref o1), &Object(ref o2)) => + o1.len() == o2.len() + && o1.iter() + .all(|&(ref sk1, ref sv1)| o2.iter().any( + |&(ref sk2, ref sv2)| sk1.item == sk2.item && sv1.item.unlocated_eq(&sv2.item))), + _ => false + } + } +} + +impl ToJson for InputValue { + fn to_json(&self) -> Json { + match *self { + InputValue::Null | InputValue::Variable(_) => Json::Null, + InputValue::Int(i) => Json::I64(i), + InputValue::Float(f) => Json::F64(f), + InputValue::String(ref s) | InputValue::Enum(ref s) => Json::String(s.clone()), + InputValue::Boolean(b) => Json::Boolean(b), + InputValue::List(ref l) => Json::Array(l.iter().map(|x| x.item.to_json()).collect()), + InputValue::Object(ref o) => Json::Object(o.iter().map(|&(ref k, ref v)| (k.item.clone(), v.item.to_json())).collect()), + } + } +} + +impl Arguments { + pub fn into_iter(self) -> vec::IntoIter<(Spanning, Spanning)> { + self.items.into_iter() + } + + pub fn iter(&self) -> slice::Iter<(Spanning, Spanning)> { + self.items.iter() + } + + pub fn iter_mut(&mut self) -> slice::IterMut<(Spanning, Spanning)> { + self.items.iter_mut() + } + + pub fn drain<'a>(&'a mut self) -> vec::Drain<'a, (Spanning, Spanning)> { + self.items.drain(..) + } + + pub fn len(&self) -> usize { + self.items.len() + } + + pub fn get(&self, key: &str) -> Option<&Spanning> { + self.items + .iter() + .filter(|&&(ref k, _)| k.item == key) + .map(|&(_, ref v)| v) + .next() + } +} + +impl VariableDefinitions { + pub fn iter(&self) -> slice::Iter<(Spanning, VariableDefinition)> { + self.items.iter() + } +} diff --git a/src/integrations/iron_handlers.rs b/src/integrations/iron_handlers.rs new file mode 100644 index 00000000..f6c54778 --- /dev/null +++ b/src/integrations/iron_handlers.rs @@ -0,0 +1,323 @@ +//! Optional handlers for the Iron framework. Requires the `iron-handlers` feature enabled. + +use iron::prelude::*; +use iron::middleware::Handler; +use iron::mime::Mime; +use iron::status; +use iron::method; + +use std::collections::{HashMap, BTreeMap}; + +use rustc_serialize::json::{ToJson, Json}; + +use ::{InputValue, GraphQLType, RootNode, execute}; + +/// Handler that executes GraphQL queries in the given schema +/// +/// The handler responds to GET requests and POST requests only. In GET +/// requests, the query should be supplied in the `query` URL parameter, e.g. +/// `http://localhost:3000/graphql?query={hero{name}}`. +/// +/// POST requests support both queries and variables. POST a JSON document to +/// this endpoint containing the field `"query"` and optionally `"variables"`. +/// The variables should be a JSON object containing the variable to value +/// mapping. 
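+///
+/// For example, a request in either format might look like the following.
+/// The query and variable names here are only an illustration, not part of
+/// the handler's API:
+///
+/// ```text
+/// GET /graphql?query={hero{name}}
+///
+/// POST /graphql
+/// Content-Type: application/json
+///
+/// {
+///     "query": "query ($id: String!) { human(id: $id) { name } }",
+///     "variables": { "id": "1000" }
+/// }
+/// ```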
+pub struct GraphQLHandler + where CtxFactory: Fn(&mut Request) -> CtxT + Send + Sync + 'static, + CtxT: Send + Sync + 'static, + Query: GraphQLType + Send + Sync + 'static, + Mutation: GraphQLType + Send + Sync + 'static, +{ + context_factory: CtxFactory, + root_node: RootNode, +} + +/// Handler that renders GraphiQL - a graphical query editor interface +pub struct GraphiQLHandler { + graphql_url: String, +} + +impl + GraphQLHandler + where CtxFactory: Fn(&mut Request) -> CtxT + Send + Sync + 'static, + CtxT: Send + Sync + 'static, + Query: GraphQLType + Send + Sync + 'static, + Mutation: GraphQLType + Send + Sync + 'static, +{ + /// Build a new GraphQL handler + /// + /// The context factory will receive the Iron request object and is + /// expected to construct a context object for the given schema. This can + /// be used to construct e.g. database connections or similar data that + /// the schema needs to execute the query. + pub fn new(context_factory: CtxFactory, query: Query, mutation: Mutation) -> Self { + GraphQLHandler { + context_factory: context_factory, + root_node: RootNode::new(query, mutation), + } + } + + + fn handle_get(&self, req: &mut Request) -> IronResult { + let url = req.url.clone().into_generic_url(); + + let mut query = None; + let variables = HashMap::new(); + + for (k, v) in url.query_pairs() { + if k == "query" { + query = Some(v.into_owned()); + } + } + + let query = iexpect!(query); + + self.execute(req, &query, &variables) + } + + fn handle_post(&self, req: &mut Request) -> IronResult { + let json_data = itry!(Json::from_reader(&mut req.body)); + + let json_obj = match json_data { + Json::Object(o) => o, + _ => return Ok(Response::with((status::BadRequest, "No JSON object was decoded"))), + }; + + let mut query = None; + let mut variables = HashMap::new(); + + for (k, v) in json_obj.into_iter() { + if k == "query" { + query = v.as_string().map(|s| s.to_owned()); + } + else if k == "variables" { + variables = match InputValue::from_json(v).to_object_value() { + Some(o) => o.into_iter().map(|(k, v)| (k.to_owned(), v.clone())).collect(), + _ => HashMap::new(), + }; + } + } + + let query = iexpect!(query); + + self.execute(req, &query, &variables) + } + + fn execute(&self, req: &mut Request, query: &str, variables: &HashMap) -> IronResult { + let context = (self.context_factory)(req); + let result = execute(query, None, &self.root_node, variables, &context); + + let content_type = "application/json".parse::().unwrap(); + + match result { + Ok((result, errors)) => { + let mut map = BTreeMap::new(); + map.insert("data".to_owned(), result.to_json()); + if !errors.is_empty() { + map.insert("errors".to_owned(), errors.to_json()); + } + + let data = Json::Object(map); + let json = data.pretty(); + + Ok(Response::with((content_type, status::Ok, json.to_string()))) + } + + Err(err) => { + let data = err.to_json(); + let json = data.pretty(); + + Ok(Response::with((content_type, status::BadRequest, json.to_string()))) + } + } + } +} + +impl GraphiQLHandler { + /// Build a new GraphiQL handler targeting the specified URL. + /// + /// The provided URL should point to the URL of the attached `GraphQLHandler`. It can be + /// relative, so a common value could be `"/graphql"`. 
+ pub fn new(graphql_url: &str) -> GraphiQLHandler { + GraphiQLHandler { + graphql_url: graphql_url.to_owned(), + } + } +} + +impl + Handler + for GraphQLHandler + where CtxFactory: Fn(&mut Request) -> CtxT + Send + Sync + 'static, + CtxT: Send + Sync + 'static, + Query: GraphQLType + Send + Sync + 'static, + Mutation: GraphQLType + Send + Sync + 'static, +{ + fn handle(&self, req: &mut Request) -> IronResult { + match req.method { + method::Get => self.handle_get(req), + method::Post => self.handle_post(req), + _ => Ok(Response::with((status::MethodNotAllowed))) + } + } +} + +impl Handler for GraphiQLHandler { + fn handle(&self, _: &mut Request) -> IronResult { + let content_type = "text/html".parse::().unwrap(); + + let stylesheet_source = r#" + + "#; + + let fetcher_source = r#" + + "#; + + let source = format!(r#" + + + + GraphQL + {stylesheet_source} + + + +
+ + + + + + + {fetcher_source} + + +"#, + graphql_url = self.graphql_url, + stylesheet_source = stylesheet_source, + fetcher_source = fetcher_source); + + Ok(Response::with((content_type, status::Ok, source))) + } +} + + +#[cfg(test)] +mod tests { + use rustc_serialize::json::Json; + + use iron::prelude::*; + use iron::status; + use iron::headers; + use iron_test::{request, response}; + use iron::{Handler, Headers}; + + use ::tests::model::Database; + + use super::GraphQLHandler; + + fn context_factory(_: &mut Request) -> Database { + Database::new() + } + + fn make_handler() -> Box { + Box::new(GraphQLHandler::new( + context_factory, + Database::new(), + (), + )) + } + + fn unwrap_json_response(resp: Response) -> Json { + let result = response::extract_body_to_string(resp); + + Json::from_str(&result).expect("Could not parse JSON object") + } + + #[test] + fn test_simple_get() { + let response = request::get( + "http://localhost:3000/?query={hero{name}}", + Headers::new(), + &make_handler()) + .expect("Unexpected IronError"); + + assert_eq!(response.status, Some(status::Ok)); + assert_eq!(response.headers.get::(), + Some(&headers::ContentType::json())); + + let json = unwrap_json_response(response); + + assert_eq!( + json, + Json::from_str(r#"{"data": {"hero": {"name": "R2-D2"}}}"#) + .expect("Invalid JSON constant in test")); + } + + #[test] + fn test_simple_post() { + let response = request::post( + "http://localhost:3000/", + Headers::new(), + r#"{"query": "{hero{name}}"}"#, + &make_handler()) + .expect("Unexpected IronError"); + + assert_eq!(response.status, Some(status::Ok)); + assert_eq!(response.headers.get::(), + Some(&headers::ContentType::json())); + + let json = unwrap_json_response(response); + + assert_eq!( + json, + Json::from_str(r#"{"data": {"hero": {"name": "R2-D2"}}}"#) + .expect("Invalid JSON constant in test")); + } + + #[test] + fn test_unsupported_method() { + let response = request::options( + "http://localhost:3000/?query={hero{name}}", + Headers::new(), + &make_handler()) + .expect("Unexpected IronError"); + + assert_eq!(response.status, Some(status::MethodNotAllowed)); + } +} diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs new file mode 100644 index 00000000..db7091e7 --- /dev/null +++ b/src/integrations/mod.rs @@ -0,0 +1 @@ +#[cfg(feature="iron-handlers")] pub mod iron_handlers; diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 00000000..63004480 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,340 @@ +/*! + +# GraphQL + +[GraphQL][1] is a data query language developed by Facebook intended to serve +mobile and web application frontends. A server provides a schema, containing +types and fields that applications can query. Queries are hierarchical, +composable, and statically typed. Schemas are introspective, which lets clients +statically verify their queries against a server without actually executing +them. + +This library provides data types and traits to expose Rust types in a GraphQL +schema, as well as an optional integration into the [Iron framework][2]. It +tries to keep the number of dynamic operations to a minimum, and give you as the +schema developer the control of the query execution path. + +## Exposing data types + +The `GraphQLType` trait is the primary interface towards application developers. +By deriving this trait, you can expose your types as either objects, enums, +interfaces, unions, or scalars. 
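+
+In rough outline, the trait looks something like this (a simplified sketch:
+the exact definition lives in `types::base`, and the hooks used to resolve
+interfaces and unions into concrete types are left out here):
+
+```rust,ignore
+pub trait GraphQLType<CtxT> {
+    /// The name this type is exposed under in the schema, e.g. "User".
+    fn name() -> Option<&'static str>;
+
+    /// Register the type's fields, arguments, and documentation, used for
+    /// introspection and query validation.
+    fn meta(registry: &mut Registry<CtxT>) -> meta::MetaType;
+
+    /// Resolve a single field on this value into a response value, with
+    /// access to the parsed arguments and the executor/context.
+    fn resolve_field(&self, field: &str, args: &Arguments,
+                     executor: &mut Executor<CtxT>) -> ExecutionResult;
+}
+```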
+ +However, due to the dynamic nature of GraphQL's type system, deriving this trait +manually is a bit tedious, especially in order to do it in a fully type safe +manner. To help with this task, this library provides a couple of macros; the +most common one being `graphql_object!`. Use this macro to expose your already +existing object types as GraphQL objects: + +```rust +#[macro_use] extern crate juniper; +use juniper::FieldResult; +# use std::collections::HashMap; + +struct User { id: String, name: String, friend_ids: Vec } +struct QueryRoot; +struct Database { users: HashMap } + +// GraphQL objects can access a "context object" during execution. Use this +// object to provide e.g. database access to the field accessors. +// +// In this example, we use the Database struct as our context. +graphql_object!(User: Database as "User" |&self| { + + // Expose a simple field as a GraphQL string. + // FieldResult is an alias for Result - simply return + // a string from this method and it will be correctly inserted into + // the execution response. + field id() -> FieldResult<&String> { + Ok(&self.id) + } + + field name() -> FieldResult<&String> { + Ok(&self.name) + } + + // Field accessors can optionally take an "executor" as their first + // argument. This object can help guide query execution and provides + // access to the context instance. + // + // In this example, the context is used to convert the friend_ids array + // into actual User objects. + field friends(&mut executor) -> FieldResult> { + Ok(self.friend_ids.iter() + .filter_map(|id| executor.context().users.get(id)) + .collect()) + } +}); + +// The context object is passed down to all referenced types - all your exposed +// types need to have the same context type. +graphql_object!(QueryRoot: Database as "Query" |&self| { + + // Arguments work just like they do on functions. + field user(&mut executor, id: String) -> FieldResult> { + Ok(executor.context().users.get(&id)) + } +}); + +# fn main() { } +``` + +Adding per type, field, and argument documentation is possible directly from +this macro. For more in-depth information on how to expose fields and types, see +the [`graphql_object!`][3] macro. + +## Integrating with Iron + +The most obvious usecase is to expose the GraphQL schema over an HTTP endpoint. +To support this, the library provides an optional and customizable Iron handler. + +For example, continuing from the schema created above: + +```rust,no_run +extern crate iron; +# #[macro_use] extern crate juniper; +# use std::collections::HashMap; + +use iron::prelude::*; +use juniper::iron_handlers::GraphQLHandler; + +# use juniper::FieldResult; +# +# struct User { id: String, name: String, friend_ids: Vec } +# struct QueryRoot; +# struct Database { users: HashMap } +# +# graphql_object!(User: Database as "User" |&self| { +# field id() -> FieldResult<&String> { +# Ok(&self.id) +# } +# +# field name() -> FieldResult<&String> { +# Ok(&self.name) +# } +# +# field friends(&mut executor) -> FieldResult> { +# Ok(self.friend_ids.iter() +# .filter_map(|id| executor.context().users.get(id)) +# .collect()) +# } +# }); +# +# graphql_object!(QueryRoot: Database as "Query" |&self| { +# field user(&mut executor, id: String) -> FieldResult> { +# Ok(executor.context().users.get(&id)) +# } +# }); + +// This function is executed for every request. Here, we would realistically +// provide a database connection or similar. For this example, we'll be +// creating the database from scratch. 
+fn context_factory(_: &mut Request) -> Database { + Database { + users: vec![ + ( "1000".to_owned(), User { + id: "1000".to_owned(), name: "Robin".to_owned(), + friend_ids: vec!["1001".to_owned()] } ), + ( "1001".to_owned(), User { + id: "1001".to_owned(), name: "Max".to_owned(), + friend_ids: vec!["1000".to_owned()] } ), + ].into_iter().collect() + } +} + +fn main() { + // GraphQLHandler takes a context factory function, the root object, + // and the mutation object. If we don't have any mutations to expose, we + // can use the empty tuple () to indicate absence. + let graphql_endpoint = GraphQLHandler::new(context_factory, QueryRoot, ()); + + // Start serving the schema at the root on port 8080. + Iron::new(graphql_endpoint).http("localhost:8080").unwrap(); +} + +``` + +See the [`iron_handlers`][4] module and the [`GraphQLHandler`][5] documentation +for more information on what request methods are supported. There's also a +built-in [GraphiQL][6] handler included. + +[1]: http://graphql.org +[2]: http://ironframework.io +[3]: macro.graphql_object!.html +[4]: iron_handlers/index.html +[5]: iron_handlers/struct.GraphQLHandler.html +[6]: https://github.com/graphql/graphiql + +*/ + +#![cfg_attr(feature="nightly", feature(test))] +#![warn(missing_docs)] + +extern crate rustc_serialize; + +#[cfg(feature="nightly")] extern crate test; +#[cfg(feature="iron-handlers")] #[macro_use(itry, iexpect)] extern crate iron; +#[cfg(test)] extern crate iron_test; + +#[macro_use] mod macros; +mod ast; +pub mod parser; +mod value; +mod types; +mod schema; +pub mod validation; +mod integrations; + +#[cfg(test)] +mod tests; + +use std::collections::HashMap; + +use rustc_serialize::json::{ToJson, Json}; + +use parser::{parse_document_source, ParseError, Spanning, SourcePosition}; +use types::execute_validated_query; +use validation::{RuleError, ValidatorContext, visit_all_rules}; + +pub use ast::{ToInputValue, FromInputValue, InputValue, Type, Selection}; +pub use value::Value; +pub use types::base::{Arguments, GraphQLType, TypeKind}; +pub use types::schema::{Executor, Registry, ExecutionResult, ExecutionError, FieldResult}; +pub use types::scalars::ID; +pub use schema::model::RootNode; + +pub use schema::meta; + +#[cfg(feature="iron-handlers")] pub use integrations::iron_handlers; + +/// An error that prevented query execution +#[derive(Debug, PartialEq)] +#[allow(missing_docs)] +pub enum GraphQLError<'a> { + ParseError(Spanning>), + ValidationError(Vec), +} + +/// Execute a query in a provided schema +pub fn execute<'a, CtxT, QueryT, MutationT>( + document_source: &'a str, + operation_name: Option<&str>, + root_node: &RootNode, + variables: &HashMap, + context: &CtxT, +) + -> Result<(Value, Vec), GraphQLError<'a>> + where QueryT: GraphQLType, + MutationT: GraphQLType, +{ + let document = try!(parse_document_source(document_source)); + + { + let mut ctx = ValidatorContext::new(&root_node.schema, &document); + visit_all_rules(&mut ctx, &document); + + let errors = ctx.into_errors(); + if !errors.is_empty() { + return Err(GraphQLError::ValidationError(errors)); + } + } + + Ok(execute_validated_query(document, operation_name, root_node, variables, context)) +} + +impl<'a> From>> for GraphQLError<'a> { + fn from(f: Spanning>) -> GraphQLError<'a> { + GraphQLError::ParseError(f) + } +} + +impl<'a> ToJson for GraphQLError<'a> { + fn to_json(&self) -> Json { + let errs = match *self { + GraphQLError::ParseError(ref err) => parse_error_to_json(err), + GraphQLError::ValidationError(ref errs) => errs.to_json(), + }; + + 
Json::Object(vec![ + ("errors".to_owned(), errs), + ].into_iter().collect()) + } +} + +fn parse_error_to_json(err: &Spanning) -> Json { + Json::Array(vec![ + Json::Object(vec![ + ("message".to_owned(), format!("{}", err.item).to_json()), + ("locations".to_owned(), vec![ + Json::Object(vec![ + ("line".to_owned(), (err.start.line() + 1).to_json()), + ("column".to_owned(), (err.start.column() + 1).to_json()) + ].into_iter().collect()), + ].to_json()), + ].into_iter().collect()), + ]) +} + +impl ToJson for RuleError { + fn to_json(&self) -> Json { + Json::Object(vec![ + ("message".to_owned(), self.message().to_json()), + ("locations".to_owned(), self.locations().to_json()), + ].into_iter().collect()) + } +} + +impl ToJson for SourcePosition { + fn to_json(&self) -> Json { + Json::Object(vec![ + ("line".to_owned(), (self.line() + 1).to_json()), + ("column".to_owned(), (self.column() + 1).to_json()), + ].into_iter().collect()) + } +} + +impl ToJson for ExecutionError { + fn to_json(&self) -> Json { + Json::Object(vec![ + ("message".to_owned(), self.message().to_json()), + ("locations".to_owned(), vec![self.location().clone()].to_json()), + ("path".to_owned(), self.path().to_json()), + ].into_iter().collect()) + } +} + +#[doc(hidden)] +pub fn to_snake_case(s: &str) -> String { + let mut dest = String::new(); + + for (i, part) in s.split('_').enumerate() { + if i > 0 && part.len() == 1 { + dest.push_str(&part.to_uppercase()); + } + else if i > 0 && part.len() > 1 { + let first = part.chars().next().unwrap().to_uppercase().collect::(); + let second = &part[1..]; + + dest.push_str(&first); + dest.push_str(second); + } + else if i == 0 { + dest.push_str(part); + } + } + + dest +} + +#[test] +fn test_to_snake_case() { + assert_eq!(&to_snake_case("test")[..], "test"); + assert_eq!(&to_snake_case("_test")[..], "Test"); + assert_eq!(&to_snake_case("first_second")[..], "firstSecond"); + assert_eq!(&to_snake_case("first_")[..], "first"); + assert_eq!(&to_snake_case("a_b_c")[..], "aBC"); + assert_eq!(&to_snake_case("a_bc")[..], "aBc"); + assert_eq!(&to_snake_case("a_b")[..], "aB"); + assert_eq!(&to_snake_case("a")[..], "a"); + assert_eq!(&to_snake_case("")[..], ""); +} diff --git a/src/macros/args.rs b/src/macros/args.rs new file mode 100644 index 00000000..f2a520c8 --- /dev/null +++ b/src/macros/args.rs @@ -0,0 +1,156 @@ +#[doc(hidden)] +#[macro_export] +macro_rules! 
__graphql__args { + // Internal type conversion + ( @as_expr, $e:expr) => { $e }; + ( @as_pattern, $p:pat) => { $p }; + + ( + @assign_arg_vars, + $args:ident, $executorvar:ident, &mut $exec:ident + ) => { + let __graphql__args!(@as_pattern, $exec) = &mut $executorvar; + }; + + ( + @assign_arg_vars, + $args:ident, $executorvar:ident, &mut $exec:ident, $($rest:tt)* + ) => { + let __graphql__args!(@as_pattern, $exec) = &mut $executorvar; + __graphql__args!(@assign_arg_vars, $args, $executorvar, $($rest)*); + }; + + ( + @assign_arg_vars, + $args:ident, $executorvar:ident, + $name:ident : Option<$ty:ty> as $desc:expr, $($rest:tt)* + ) => { + let $name: Option<$ty> = $args + .get(&$crate::to_snake_case(stringify!($name))) + .unwrap_or(None); + __graphql__args!(@assign_arg_vars, $args, $executorvar, $($rest)*); + }; + + ( + @assign_arg_vars, + $args:ident, $executorvar:ident, + $name:ident : Option<$ty:ty> as $desc:expr + ) => { + let $name: Option<$ty> = $args + .get(&$crate::to_snake_case(stringify!($name))) + .unwrap_or(None); + }; + + ( + @assign_arg_vars, + $args:ident, $executorvar:ident, + $name:ident $(= $default:tt)* : $ty:ty $(as $desc:tt)*, $($rest:tt)* + ) => { + let $name: $ty = $args + .get(&$crate::to_snake_case(stringify!($name))) + .expect("Argument missing - validation must have failed"); + __graphql__args!(@assign_arg_vars, $args, $executorvar, $($rest)*); + }; + + ( + @assign_arg_vars, + $args:ident, $executorvar:ident, + $name:ident $(= $default:tt)* : $ty:ty $(as $desc:expr)* + ) => { + let $name: $ty = $args + .get(&$crate::to_snake_case(stringify!($name))) + .expect("Argument missing - validation must have failed"); + }; + + ( @assign_arg_vars, $args:ident, $executorvar:ident, ) => { + (); + }; + + ( + @apply_args, + $reg:expr, $base:expr, ( &mut executor ) + ) => { + $base + }; + + ( + @apply_args, + $reg:expr, $base:expr, ( &mut executor , $( $rest:tt )* ) + ) => { + __graphql__args!( + @apply_args, + $reg, + $base, + ( $($rest)* )) + }; + + ( + @apply_args, + $reg:expr, $base:expr, ( $name:ident = $default:tt : $t:ty ) + ) => { + $base.argument($reg.arg_with_default::<$t>( + &$crate::to_snake_case(stringify!($name)), + &__graphql__args!(@as_expr, $default))) + }; + + ( + @apply_args, + $reg:expr, $base:expr, ( $name:ident = $default:tt : $t:ty , $( $rest:tt )* ) + ) => { + __graphql__args!( + @apply_args, + $reg, + $base.argument($reg.arg_with_default::<$t>( + &$crate::to_snake_case(stringify!($name)), + &__graphql__args!(@as_expr, $default))), + ( $($rest)* )) + }; + + ( + @apply_args, + $reg:expr, $base:expr, ( $name:ident : $t:ty ) + ) => { + $base.argument($reg.arg::<$t>( + &$crate::to_snake_case(stringify!($name)))) + }; + + ( + @apply_args, + $reg:expr, $base:expr, ( $name:ident : $t:ty , $( $rest:tt )* ) + ) => { + __graphql__args!( + @apply_args, + $reg, + $base.argument($reg.arg::<$t>( + &$crate::to_snake_case(stringify!($name)))), + ( $($rest)* )) + }; + + ( + @apply_args, + $reg:expr, $base:expr, ( $name:ident : $t:ty as $desc:expr ) + ) => { + $base.argument( + $reg.arg::<$t>( + &$crate::to_snake_case(stringify!($name))) + .description($desc)) + }; + + ( + @apply_args, + $reg:expr, $base:expr, ( $name:ident : $t:ty as $desc:expr , $( $rest:tt )* ) + ) => { + __graphql__args!( + @apply_args, + $reg, + $base.argument( + $reg.arg::<$t>( + &$crate::to_snake_case(stringify!($name))) + .description($desc)), + ( $($rest)* )) + }; + + ( @apply_args, $reg:expr, $base:expr, ( ) ) => { + $base + }; +} diff --git a/src/macros/enums.rs b/src/macros/enums.rs new file mode 
100644 index 00000000..3c3a9de7 --- /dev/null +++ b/src/macros/enums.rs @@ -0,0 +1,99 @@ +/** +Expose simple enums + +GraphQL enums are similar to enums classes C++ - more like grouped constants +with type safety than what Rust enums offer. This macro can be used to export +non-data carrying Rust enums to GraphQL: + +```rust +# #[macro_use] extern crate juniper; +enum Color { + Red, + Green, + Blue +} + +graphql_enum!(Color { + Color::Red => "RED", + Color::Green => "GREEN", + Color::Blue => "BLUE", +}); + +# fn main() { } +``` + +The macro expands to a `match` statement which will result in a compilation +error if not all enum variants are covered. It also creates an implementation +for `FromInputValue` and `ToInputValue`, making it usable in arguments and +default values. + +If you want to expose the enum under a different name than the Rust type, +you can write `graphql_enum!(Color as "MyColor" { ...`. + +*/ +#[macro_export] +macro_rules! graphql_enum { + ( @as_expr, $e:expr) => { $e }; + ( @as_pattern, $p:pat) => { $p }; + + // EnumName as "__ExportedNmae" { Enum::Value => "STRING_VALUE", } + // with no trailing comma + ( $name:path as $outname:tt { $($eval:path => $ename:tt),* }) => { + impl $crate::GraphQLType for $name { + fn name() -> Option<&'static str> { + Some(graphql_enum!(@as_expr, $outname)) + } + + fn meta(registry: &mut $crate::Registry) -> $crate::meta::MetaType { + registry.build_enum_type::<$name>()(&[ + $( $crate::meta::EnumValue::new(graphql_enum!(@as_expr, $ename)) ),* + ]) + .into_meta() + } + + fn resolve(&self, _: Option>, _: &mut $crate::Executor) -> $crate::Value { + match self { + $( + &graphql_enum!(@as_pattern, $eval) => + $crate::Value::string(graphql_enum!(@as_expr, $ename)) ),* + } + } + } + + impl $crate::FromInputValue for $name { + fn from(v: &$crate::InputValue) -> Option<$name> { + match v.as_enum_value() { + $( + Some(graphql_enum!(@as_pattern, $ename)) + => Some(graphql_enum!(@as_expr, $eval)), )* + _ => None, + } + } + } + + impl $crate::ToInputValue for $name { + fn to(&self) -> $crate::InputValue { + match self { + $( + &graphql_enum!(@as_pattern, $eval) => + $crate::InputValue::string(graphql_enum!(@as_expr, $ename)) ),* + } + } + } + }; + + // Same as above, *with* trailing comma + ( $name:path as $outname:tt { $($eval:path => $ename:tt, )* }) => { + graphql_enum!($name as $outname { $( $eval => $ename ),* }); + }; + + // Default named enum, without trailing comma + ( $name:path { $($eval:path => $ename:tt),* }) => { + graphql_enum!($name as (stringify!($name)) { $( $eval => $ename ),* }); + }; + + // Default named enum, with trailing comma + ( $name:path { $($eval:path => $ename:tt, )* }) => { + graphql_enum!($name as (stringify!($name)) { $( $eval => $ename ),* }); + }; +} diff --git a/src/macros/field.rs b/src/macros/field.rs new file mode 100644 index 00000000..1774c0d3 --- /dev/null +++ b/src/macros/field.rs @@ -0,0 +1,55 @@ +#[doc(hidden)] +#[macro_export] +macro_rules! 
__graphql__build_field_matches { + ( + $resolveargs:tt, + ( $( $acc:tt )* ), field $name:ident $args:tt -> $t:ty as $desc:tt $body:block $( $rest:tt )* + ) => { + __graphql__build_field_matches!( + $resolveargs, + (($name; $args; $t; $body) $( $acc )*), + $( $rest )*); + }; + + ( + $resolveargs:tt, + ( $( $acc:tt )* ), field $name:ident $args:tt -> $t:ty $body:block $( $rest:tt )* + ) => { + __graphql__build_field_matches!( + $resolveargs, + (($name; $args; $t; $body) $( $acc )*), + $( $rest )*); + }; + + ( $resolveargs:tt, $acc:tt, description : $value:tt $( $rest:tt )*) => { + __graphql__build_field_matches!($resolveargs, $acc, $( $rest )*); + }; + + ( $resolveargs:tt, $acc:tt, interfaces : $value:tt $( $rest:tt )*) => { + __graphql__build_field_matches!($resolveargs, $acc, $( $rest )*); + }; + + ( $resolveargs:tt, $acc:tt, instance_resolvers : | $execvar:pat | $resolvers:tt $( $rest:tt )*) => { + __graphql__build_field_matches!($resolveargs, $acc, $( $rest )*); + }; + + ( + ($outname:tt, $selfvar:ident, $fieldvar:ident, $argsvar:ident, $executorvar:ident), + ( $( ( $name:ident; ( $($args:tt)* ); $t:ty; $body:block ) )* ), + ) => { + $( + if $fieldvar == &$crate::to_snake_case(stringify!($name)) { + let result: $t = { + __graphql__args!( + @assign_arg_vars, + $argsvar, $executorvar, $($args)* + ); + $body + }; + + return result.and_then(|r| $executorvar.resolve(&r)) + } + )* + panic!("Field {} not found on type {}", $fieldvar, $outname); + }; +} diff --git a/src/macros/interface.rs b/src/macros/interface.rs new file mode 100644 index 00000000..0b99480c --- /dev/null +++ b/src/macros/interface.rs @@ -0,0 +1,317 @@ +/** +Expose GraphQL interfaces + +Mapping interfaces to GraphQL can be tricky: there is no direct counterpart to +GraphQL interfaces in Rust, and downcasting is not possible in the general case. +Many other GraphQL implementations in other languages use instance checks and +either dynamic typing or forced downcasts to support these features. + +A GraphQL interface defines fields that the implementing types also need to +implement. A GraphQL interface also needs to be able to determine the concrete +type name as well as downcast the general type to the actual concrete type. + +## Syntax + +See the documentation for [`graphql_object!`][1] on the general item and type +syntax. `graphql_interface!` requires an additional `instance_resolvers` item, +and does _not_ support the `interfaces` item. + +`instance_resolvers` is a list/lambda hybrid used to resolve the concrete +instance type of the interface. It starts with a context argument and continues +with an array of expressions, each resolving into an `Option` of the possible +instances: + +```rust,ignore +instance_resolvers: |&context| [ + context.get_human(self.id()), // returns Option + context.get_droid(self.id()), // returns Option +], +``` + +Each item in the array will be executed in order when the concrete type is +required. + +## Example + +A simplified extract from the StarWars schema example shows how to use the +shared context to implement downcasts. 
+ +```rust +# #[macro_use] extern crate juniper; +# use juniper::FieldResult; +# use std::collections::HashMap; +struct Human { id: String } +struct Droid { id: String } +struct Database { + humans: HashMap, + droids: HashMap, +} + +trait Character { + fn id(&self) -> &str; +} + +impl Character for Human { + fn id(&self) -> &str { &self.id } +} + +impl Character for Droid { + fn id(&self) -> &str { &self.id } +} + +graphql_object!(Human: Database as "Human" |&self| { + field id() -> FieldResult<&str> { Ok(&self.id) } +}); + +graphql_object!(Droid: Database as "Droid" |&self| { + field id() -> FieldResult<&str> { Ok(&self.id) } +}); + +// You can introduce lifetimes or generic parameters by < > before the name. +graphql_interface!(<'a> &'a Character: Database as "Character" |&self| { + field id() -> FieldResult<&str> { Ok(self.id()) } + + instance_resolvers: |&context| [ + context.humans.get(self.id()), + context.droids.get(self.id()), + ] +}); + +# fn main() { } +``` + +[1]: macro.graphql_object!.html + +*/ +#[macro_export] +macro_rules! graphql_interface { + ( @as_item, $i:item) => { $i }; + ( @as_expr, $e:expr) => { $e }; + + ( + @gather_meta, + $reg:expr, $acc:expr, $descr:expr, + field $name:ident $args:tt -> $t:ty as $desc:tt $body:block $( $rest:tt )* + ) => { + $acc.push(__graphql__args!( + @apply_args, + $reg, + $reg.field_inside_result( + &$crate::to_snake_case(stringify!($name)), + Err("dummy".to_owned()) as $t) + .description($desc), + $args)); + + graphql_interface!(@gather_meta, $reg, $acc, $descr, $( $rest )*); + }; + + ( + @gather_meta, + $reg:expr, $acc:expr, $descr:expr, + field $name:ident $args:tt -> $t:ty $body:block $( $rest:tt )* + ) => { + $acc.push(__graphql__args!( + @apply_args, + $reg, + $reg.field_inside_result( + &$crate::to_snake_case(stringify!($name)), + Err("dummy".to_owned()) as $t), + $args)); + + graphql_interface!(@gather_meta, $reg, $acc, $descr, $( $rest )*); + }; + + ( + @gather_meta, + $reg:expr, $acc:expr, $descr:expr, + description : $value:tt $( $rest:tt )* + ) => { + $descr = Some(graphql_interface!(@as_expr, $value)); + + graphql_interface!(@gather_meta, $reg, $acc, $descr, $( $rest )*) + }; + + ( + @gather_meta, + $reg:expr, $acc:expr, $descr:expr, + instance_resolvers: | $execvar:pat | $resolvers:tt $( $rest:tt )* + ) => { + graphql_interface!(@gather_meta, $reg, $acc, $descr, $( $rest )*) + }; + + ( @gather_meta, $reg:expr, $acc:expr, $descr:expr, $(,)* ) => {}; + + ( + @resolve_into_type, + $buildargs:tt, + field $name:ident $args:tt -> $t:ty as $descr:tt $body:block $( $rest:tt )* + ) => { + graphql_interface!(@resolve_into_type, $buildargs, $( $rest )*) + }; + + ( + @resolve_into_type, + $buildargs:tt, + field $name:ident $args:tt -> $t:ty $body:block $( $rest:tt )* + ) => { + graphql_interface!(@resolve_into_type, $buildargs, $( $rest )*) + }; + + ( + @resolve_into_type, + $buildargs:tt, description : $value:tt $( $rest:tt )* + ) => { + graphql_interface!(@resolve_into_type, $buildargs, $( $rest )*) + }; + + ( + @resolve_into_type, + $buildargs:tt, interfaces : $value:tt $( $rest:tt )* + ) => { + graphql_interface!(@resolve_into_type, $buildargs, $( $rest )*) + }; + + ( + @concrete_type_name, + $buildargs:tt, + field $name:ident $args:tt -> $t:ty as $descr:tt $body:block $( $rest:tt )* + ) => { + graphql_interface!(@concrete_type_name, $buildargs, $( $rest )*) + }; + + ( + @concrete_type_name, + $buildargs:tt, + field $name:ident $args:tt -> $t:ty $body:block $( $rest:tt )* + ) => { + graphql_interface!(@concrete_type_name, $buildargs, 
$( $rest )*) + }; + + ( + @concrete_type_name, + $buildargs:tt, description : $value:tt $( $rest:tt )* + ) => { + graphql_interface!(@concrete_type_name, $buildargs, $( $rest )*) + }; + + ( + @concrete_type_name, + $buildargs:tt, interfaces : $value:tt $( $rest:tt )* + ) => { + graphql_interface!(@concrete_type_name, $buildargs, $( $rest )*) + }; + + ( + @concrete_type_name, + ($outname:tt, $ctxtarg:ident, $ctxttype:ty), + instance_resolvers : | $ctxtvar:pat | [ $( $resolver:expr , )* ] $( $rest:tt )* + ) => { + let $ctxtvar = &$ctxtarg; + + fn inner_type_of(_: T) -> String where T: $crate::GraphQLType<$ctxttype> { + T::name().unwrap().to_owned() + } + + $( + if let Some(ref v) = $resolver { + return inner_type_of(v); + } + )* + + panic!("Concrete type not handled by instance resolvers on {}", $outname); + }; + + ( @concrete_type_name, $buildargs:tt, ) => { + () + }; + + ( + @resolve_into_type, + ($outname:tt, $typenamearg:ident, $execarg:ident, $ctxttype:ty), + instance_resolvers : | $ctxtvar:pat | [ $( $resolver:expr , )* ] $( $rest:tt )* + ) => { + let $ctxtvar = &$execarg.context(); + + fn inner_type_of(_: T) -> String where T: $crate::GraphQLType<$ctxttype> { + T::name().unwrap().to_owned() + } + + $( + if let Some(ref v) = $resolver { + if inner_type_of(v) == $typenamearg { + return $execarg.resolve(v); + } + } + )* + + panic!("Concrete type not handled by instance resolvers on {}", $outname); + }; + + ( @resolve_into_type, $buildargs:tt, ) => { + () + }; + + ( + ( $($lifetime:tt),* ) $name:ty : $ctxt:ty as $outname:tt | &$mainself:ident | { + $( $items:tt )* + } + ) => { + graphql_interface!(@as_item, impl<$($lifetime)*> $crate::GraphQLType<$ctxt> for $name { + fn name() -> Option<&'static str> { + Some($outname) + } + + #[allow(unused_assignments)] + #[allow(unused_mut)] + fn meta(registry: &mut $crate::Registry<$ctxt>) -> $crate::meta::MetaType { + let mut fields = Vec::new(); + let mut description = None; + graphql_interface!(@gather_meta, registry, fields, description, $($items)*); + let mut mt = registry.build_interface_type::<$name>()(&fields); + + if let Some(description) = description { + mt = mt.description(description); + } + + mt.into_meta() + } + + #[allow(unused_variables)] + #[allow(unused_mut)] + fn resolve_field(&$mainself, field: &str, args: &$crate::Arguments, mut executor: &mut $crate::Executor<$ctxt>) -> $crate::ExecutionResult { + __graphql__build_field_matches!(($outname, $mainself, field, args, executor), (), $($items)*); + } + + fn concrete_type_name(&$mainself, context: &$ctxt) -> String { + graphql_interface!( + @concrete_type_name, + ($outname, context, $ctxt), + $($items)*); + } + + fn resolve_into_type(&$mainself, type_name: &str, _: Option>, executor: &mut $crate::Executor<$ctxt>) -> $crate::ExecutionResult { + graphql_interface!( + @resolve_into_type, + ($outname, type_name, executor, $ctxt), + $($items)*); + } + }); + }; + + ( + <$($lifetime:tt),*> $name:ty : $ctxt:ty as $outname:tt | &$mainself:ident | { + $( $items:tt )* + } + ) => { + graphql_interface!( + ($($lifetime),*) $name : $ctxt as $outname | &$mainself | { $( $items )* }); + }; + + ( + $name:ty : $ctxt:ty as $outname:tt | &$mainself:ident | { + $( $items:tt )* + } + ) => { + graphql_interface!(() $name : $ctxt as $outname | &$mainself | { $( $items )* }); + }; +} diff --git a/src/macros/mod.rs b/src/macros/mod.rs new file mode 100644 index 00000000..1a7db6ab --- /dev/null +++ b/src/macros/mod.rs @@ -0,0 +1,6 @@ +#[macro_use] mod enums; +#[macro_use] mod object; +#[macro_use] mod interface; 
+#[macro_use] mod scalar; +#[macro_use] mod args; +#[macro_use] mod field; diff --git a/src/macros/object.rs b/src/macros/object.rs new file mode 100644 index 00000000..bd68495d --- /dev/null +++ b/src/macros/object.rs @@ -0,0 +1,379 @@ +/** +Expose GraphQL objects + +This is a short-hand macro that implements the `GraphQLType` trait for a given +type. By using this macro instead of implementing it manually, you gain type +safety and reduce repetitive declarations. + +# Examples + +The simplest case exposes fields on a struct: + +```rust +# #[macro_use] extern crate juniper; +# use juniper::FieldResult; +struct User { id: String, name: String, group_ids: Vec } + +graphql_object!(User: () as "User" |&self| { + field id() -> FieldResult<&String> { + Ok(&self.id) + } + + field name() -> FieldResult<&String> { + Ok(&self.name) + } + + // Field and argument names will be converted from snake case to camel case, + // as is the common naming convention in GraphQL. The following field would + // be named "memberOfGroup", and the argument "groupId". + field member_of_group(group_id: String) -> FieldResult { + Ok(self.group_ids.iter().any(|gid| gid == &group_id)) + } +}); + +# fn main() { } +``` + +## Documentation and descriptions + +You can optionally add descriptions to the type itself, the fields, and field +arguments: + +```rust +# #[macro_use] extern crate juniper; +# use juniper::FieldResult; +struct User { id: String, name: String, group_ids: Vec } + +graphql_object!(User: () as "User" |&self| { + description: "A user in the database" + + field id() -> FieldResult<&String> as "The user's unique identifier" { + Ok(&self.id) + } + + field name() -> FieldResult<&String> as "The user's name" { + Ok(&self.name) + } + + field member_of_group( + group_id: String as "The group id you want to test membership against" + ) -> FieldResult as "Test if a user is member of a group" { + Ok(self.group_ids.iter().any(|gid| gid == &group_id)) + } +}); + +# fn main() { } +``` + +## Generics and lifetimes + +You can expose generic or pointer types by prefixing the type with the necessary +generic parameters: + +```rust +# #[macro_use] extern crate juniper; +# use juniper::FieldResult; +trait SomeTrait { fn id(&self) -> &str; } + +graphql_object!(<'a> &'a SomeTrait: () as "SomeTrait" |&self| { + field id() -> FieldResult<&str> { Ok(self.id()) } +}); + +struct GenericType { items: Vec } + +graphql_object!( GenericType: () as "GenericType" |&self| { + field count() -> FieldResult { Ok(self.items.len() as i64) } +}); + +struct SelfContained { name: String } + +// If the type does not require access to a specific context, you can make it +// generic on the context type. This statically ensures that the fields only +// can access what's available from the type itself. 
+graphql_object!( SelfContained: Context as "SelfContained" |&self| { + field name() -> FieldResult<&String> { Ok(&self.name) } +}); + +# fn main() { } +``` + +## Implementing interfaces + +You can use the `interfaces` item to implement interfaces: + +```rust +# #[macro_use] extern crate juniper; +# use juniper::FieldResult; +trait Interface { + fn id(&self) -> &str; + fn as_implementor(&self) -> Option; +} +struct Implementor { id: String } + +graphql_interface!(<'a> &'a Interface: () as "Interface" |&self| { + field id() -> FieldResult<&str> { Ok(self.id()) } + + instance_resolvers: |&context| [ + self.as_implementor(), + ] +}); + +graphql_object!(Implementor: () as "Implementor" |&self| { + field id() -> FieldResult<&str> { Ok(&self.id) } + + interfaces: [&Interface] +}); + +# fn main() { } +``` + +Note that the implementing type does not need to implement the trait on the Rust +side - only what's in the GraphQL schema matters. The GraphQL interface doesn't +even have to be backed by a trait! + +## Emitting errors + +`FieldResult` is a simple type alias for `Result`. In the end, +errors that fields emit are serialized into strings in the response. However, +the execution system will keep track of the source of all errors, and will +continue executing despite some fields failing. + +``` +# #[macro_use] extern crate juniper; +# use juniper::FieldResult; +struct User { id: String } + +graphql_object!(User: () as "User" |&self| { + field id() -> FieldResult<&String> { + Ok(&self.id) + } + + field name() -> FieldResult<&String> { + Err("Does not have a name".to_owned()) + } +}); + +# fn main() { } +``` + +# Syntax + +The top-most syntax of this macro defines which type to expose, the context +type, which lifetime parameters or generics to define, and which name to use in +the GraphQL schema. It takes one of the following two forms: + +```text +ExposedType: ContextType as "ExposedName" |&self| { items... } + ExposedType: ContextType as "ExposedName" |&self| { items... } +``` + +## Items + +Each item within the brackets of the top level declaration has its own syntax. +The order of individual items does not matter. `graphql_object!` supports a +number of different items. + +### Top-level description + +```text +description: "Top level description" +``` + +Adds documentation to the type in the schema, usable by tools such as GraphiQL. + +### Interfaces + +```text +interfaces: [&Interface, ...] +``` + +Informs the schema that the type implements the specified interfaces. This needs +to be _GraphQL_ interfaces, not necessarily Rust traits. The Rust types do not +need to have any connection, only what's exposed in the schema matters. + +### Fields + +```text +field name(args...) -> FieldResult { } +field name(args...) -> FieldResult as "Field description" { } +``` + +Defines a field on the object. The name is converted to camel case, e.g. +`user_name` is exposed as `userName`. The `as "Field description"` adds the +string as documentation on the field. + +### Field arguments + +```text +&mut executor +arg_name: ArgType +arg_name = default_value: ArgType +arg_name: ArgType as "Argument description" +arg_name = default_value: ArgType as "Argument description" +``` + +Field arguments can take many forms. If the field needs access to the executor +or context, it can take an [Executor][1] instance by specifying `&mut executor` +as the first argument. 
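A minimal sketch of a field that needs the context: it takes the executor as its
first argument, followed by an ordinary argument. The `Database` context type and
its `friends` field are invented for illustration and are not part of Juniper.

```rust
# #[macro_use] extern crate juniper;
# use juniper::FieldResult;
// Hypothetical context type used only for this sketch.
struct Database { friends: Vec<String> }
struct User { id: String }

graphql_object!(User: Database as "User" |&self| {
    // `&mut executor` must come first; the remaining arguments are exposed
    // as regular GraphQL field arguments.
    field is_friends_with(&mut executor, id: String) -> FieldResult<bool> {
        let db = executor.context();
        Ok(self.id != id && db.friends.iter().any(|friend_id| friend_id == &id))
    }
});

# fn main() { }
```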
+ +The other cases are similar to regular Rust arguments, with two additions: +argument documentation can be added by appending `as "Description"` after the +type, and a default value can be specified by appending `= value` after the +argument name. + +Arguments are required (i.e. non-nullable) by default. If you specify _either_ a +default value, _or_ make the type into an `Option<>`, the argument becomes +optional. For example: + +```text +arg_name: String -- required +arg_name: Option -- optional, None if unspecified +arg_name = "default": String -- optional "default" if unspecified +``` + +[1]: struct.Executor.html + +*/ +#[macro_export] +macro_rules! graphql_object { + ( @as_item, $i:item) => { $i }; + ( @as_expr, $e:expr) => { $e }; + + ( + @gather_object_meta, + $reg:expr, $acc:expr, $descr:expr, $ifaces:expr, + field $name:ident $args:tt -> $t:ty as $desc:tt $body:block $( $rest:tt )* + ) => { + $acc.push(__graphql__args!( + @apply_args, + $reg, + $reg.field_inside_result( + &$crate::to_snake_case(stringify!($name)), + Err("dummy".to_owned()) as $t) + .description($desc), + $args)); + + graphql_object!(@gather_object_meta, $reg, $acc, $descr, $ifaces, $( $rest )*); + }; + + ( + @gather_object_meta, + $reg:expr, $acc:expr, $descr:expr, $ifaces:expr, + field $name:ident $args:tt -> $t:ty $body:block $( $rest:tt )* + ) => { + $acc.push(__graphql__args!( + @apply_args, + $reg, + $reg.field_inside_result( + &$crate::to_snake_case(stringify!($name)), + Err("dummy".to_owned()) as $t), + $args)); + + graphql_object!(@gather_object_meta, $reg, $acc, $descr, $ifaces, $( $rest )*); + }; + + ( + @gather_object_meta, + $reg:expr, $acc:expr, $descr:expr, $ifaces:expr, + description : $value:tt $( $rest:tt )* + ) => { + $descr = Some(graphql_object!(@as_expr, $value)); + + graphql_object!(@gather_object_meta, $reg, $acc, $descr, $ifaces, $( $rest )*) + }; + + ( + @gather_object_meta, + $reg:expr, $acc:expr, $descr:expr, $ifaces:expr, + interfaces : $value:tt $( $rest:tt )* + ) => { + graphql_object!(@assign_interfaces, $reg, $ifaces, $value); + + graphql_object!(@gather_object_meta, $reg, $acc, $descr, $ifaces, $( $rest )*) + }; + + ( + @gather_object_meta, + $reg:expr, $acc:expr, $descr:expr, $ifaces:expr, $(,)* + ) => {}; + + ( @assign_interfaces, $reg:expr, $tgt:expr, [ $($t:ty,)* ] ) => { + $tgt = Some(vec![ + $($reg.get_type::<$t>()),* + ]); + }; + + ( @assign_interfaces, $reg:expr, $tgt:expr, [ $($t:ty),* ] ) => { + $tgt = Some(vec![ + $($reg.get_type::<$t>()),* + ]); + }; + + ( + ( $($lifetime:tt)* ); + $name:ty; $ctxt:ty; $outname:expr; $mainself:ident; $($items:tt)* + ) => { + graphql_object!(@as_item, impl<$($lifetime)*> $crate::GraphQLType<$ctxt> for $name { + fn name() -> Option<&'static str> { + Some($outname) + } + + #[allow(unused_assignments)] + #[allow(unused_mut)] + fn meta(registry: &mut $crate::Registry<$ctxt>) -> $crate::meta::MetaType { + let mut fields = Vec::new(); + let mut description = None; + let mut interfaces: Option> = None; + graphql_object!( + @gather_object_meta, + registry, fields, description, interfaces, $($items)* + ); + let mut mt = registry.build_object_type::<$name>()(&fields); + + if let Some(description) = description { + mt = mt.description(description); + } + + if let Some(interfaces) = interfaces { + mt = mt.interfaces(&interfaces); + } + + mt.into_meta() + } + + #[allow(unused_variables)] + #[allow(unused_mut)] + fn resolve_field( + &$mainself, + field: &str, + args: &$crate::Arguments, + mut executor: &mut $crate::Executor<$ctxt> + ) + -> 
$crate::ExecutionResult + { + __graphql__build_field_matches!( + ($outname, $mainself, field, args, executor), + (), + $($items)*); + } + }); + }; + + ( + <$( $lifetime:tt ),*> $name:ty : $ctxt:ty as $outname:tt | &$mainself:ident | { + $( $items:tt )* + } + ) => { + graphql_object!( + ( $($lifetime),* ); $name; $ctxt; $outname; $mainself; $( $items )*); + }; + + ( + $name:ty : $ctxt:ty as $outname:tt | &$mainself:ident | { + $( $items:tt )* + } + ) => { + graphql_object!( + ( ); $name; $ctxt; $outname; $mainself; $( $items )*); + }; +} diff --git a/src/macros/scalar.rs b/src/macros/scalar.rs new file mode 100644 index 00000000..6f203e4b --- /dev/null +++ b/src/macros/scalar.rs @@ -0,0 +1,109 @@ +/** +Expose GraphQL scalars + +The GraphQL language defines a number of built-in scalars: strings, numbers, and +booleans. This macro can be used either to define new types of scalars (e.g. +timestamps), or expose other types as one of the built-in scalars (e.g. bigints +as numbers or strings). + +Since the preferred transport protocol for GraphQL responses is JSON, most +custom scalars will be transferred as strings. You therefore need to ensure that +the client library you are sending data to can parse the custom value into a +datatype appropriate for that platform. + +```rust +# #[macro_use] extern crate juniper; +# use juniper::{Value, FieldResult}; +struct UserID(String); + +graphql_scalar!(UserID as "UserID" { + resolve(&self) -> Value { + Value::string(&self.0) + } + + from_input_value(v: &InputValue) -> Option { + v.as_string_value().map(|s| UserID(s.to_owned())) + } +}); + +# fn main() { } +``` + +In addition to implementing `GraphQLType` for the type in question, +`FromInputValue` and `ToInputValue` is also implemented. This makes the type +usable as arguments and default values. + +`graphql_scalar!` supports generic and lifetime parameters similar to +`graphql_object!`. + +*/ +#[macro_export] +macro_rules! 
graphql_scalar { + ( + @build_scalar_resolver, + resolve(&$selfvar:ident) -> Value $body:block $($rest:tt)* + ) => { + fn resolve(&$selfvar, _: Option>, _: &mut $crate::Executor) -> $crate::Value { + $body + } + }; + + ( + @build_scalar_conv_impl, + $name:ty; [$($lifetime:tt),*]; + resolve(&$selfvar:ident) -> Value $body:block $($rest:tt)* + ) => { + impl<$($lifetime),*> $crate::ToInputValue for $name { + fn to(&$selfvar) -> $crate::InputValue { + $crate::ToInputValue::to(&$body) + } + } + + graphql_scalar!(@build_scalar_conv_impl, $name; [$($lifetime),*]; $($rest)*); + }; + + ( + @build_scalar_conv_impl, + $name:ty; [$($lifetime:tt),*]; + from_input_value($arg:ident: &InputValue) -> $result:ty $body:block + $($rest:tt)* + ) => { + impl<$($lifetime),*> $crate::FromInputValue for $name { + fn from($arg: &$crate::InputValue) -> $result { + $body + } + } + + graphql_scalar!(@build_scalar_conv_impl, $name; [$($lifetime),*]; $($rest)*); + }; + + ( + @build_scalar_conv_impl, + $name:ty; $($lifetime:tt),*; + ) => { + }; + + (($($lifetime:tt),*) $name:ty as $outname:expr => { $( $items:tt )* }) => { + impl<$($lifetime,)* CtxT> $crate::GraphQLType for $name { + fn name() -> Option<&'static str> { + Some($outname) + } + + fn meta(registry: &mut $crate::Registry) -> $crate::meta::MetaType { + registry.build_scalar_type::().into_meta() + } + + graphql_scalar!(@build_scalar_resolver, $($items)*); + } + + graphql_scalar!(@build_scalar_conv_impl, $name; [$($lifetime),*]; $($items)*); + }; + + (<$($lifetime:tt),*> $name:ty as $outname:tt { $( $items:tt )* }) => { + graphql_scalar!(($($lifetime),*) $name as $outname => { $( $items )* }); + }; + + ( $name:ty as $outname:tt { $( $items:tt )* }) => { + graphql_scalar!(() $name as $outname => { $( $items )* }); + } +} diff --git a/src/parser/document.rs b/src/parser/document.rs new file mode 100644 index 00000000..4592000e --- /dev/null +++ b/src/parser/document.rs @@ -0,0 +1,339 @@ +use ast::{Definition, Document, OperationType, + VariableDefinitions, VariableDefinition, InputValue, + Operation, Fragment, Selection, Directive, Field, Arguments, + FragmentSpread, InlineFragment, Type}; + +use parser::{Lexer, Parser, Spanning, UnlocatedParseResult, OptionParseResult, ParseResult, ParseError, Token}; +use parser::value::parse_value_literal; + +#[doc(hidden)] +pub fn parse_document_source(s: &str) -> UnlocatedParseResult { + let mut lexer = Lexer::new(s); + let mut parser = try!(Parser::new(&mut lexer).map_err(|s| s.map(ParseError::LexerError))); + parse_document(&mut parser) +} + +fn parse_document<'a>(parser: &mut Parser<'a>) -> UnlocatedParseResult<'a, Document> { + let mut defs = Vec::new(); + + loop { + defs.push(try!(parse_definition(parser))); + + if parser.peek().item == Token::EndOfFile { + return Ok(defs); + } + } +} + +fn parse_definition<'a>(parser: &mut Parser<'a>) -> UnlocatedParseResult<'a, Definition> { + match parser.peek().item { + Token::CurlyOpen | Token::Name("query") | Token::Name("mutation") => + Ok(Definition::Operation(try!(parse_operation_definition(parser)))), + Token::Name("fragment") => + Ok(Definition::Fragment(try!(parse_fragment_definition(parser)))), + _ => Err(parser.next().map(ParseError::UnexpectedToken)), + } +} + +fn parse_operation_definition<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, Operation> { + if parser.peek().item == Token::CurlyOpen { + let selection_set = try!(parse_selection_set(parser)); + + Ok(Spanning::start_end( + &selection_set.start, + &selection_set.end, + Operation { + operation_type: 
OperationType::Query, + name: None, + variable_definitions: None, + directives: None, + selection_set: selection_set.item, + })) + } + else { + let start_pos = parser.peek().start.clone(); + let operation_type = try!(parse_operation_type(parser)); + let name = match parser.peek().item { + Token::Name(_) => Some(try!(parser.expect_name())), + _ => None + }; + let variable_definitions = try!(parse_variable_definitions(parser)); + let directives = try!(parse_directives(parser)); + let selection_set = try!(parse_selection_set(parser)); + + Ok(Spanning::start_end( + &start_pos, + &selection_set.end, + Operation { + operation_type: operation_type.item, + name: name, + variable_definitions: variable_definitions, + directives: directives.map(|s| s.item), + selection_set: selection_set.item, + })) + } +} + +fn parse_fragment_definition<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, Fragment> { + let Spanning { start: start_pos, .. } = try!(parser.expect(&Token::Name("fragment"))); + let name = match parser.expect_name() { + Ok(n) => if &n.item == "on" { + return Err(n.map(|_| ParseError::UnexpectedToken(Token::Name("on")))); + } + else { + n + }, + Err(e) => return Err(e), + }; + + try!(parser.expect(&Token::Name("on"))); + let type_cond = try!(parser.expect_name()); + let directives = try!(parse_directives(parser)); + let selection_set = try!(parse_selection_set(parser)); + + Ok(Spanning::start_end( + &start_pos, + &selection_set.end, + Fragment { + name: name, + type_condition: type_cond, + directives: directives.map(|s| s.item), + selection_set: selection_set.item, + })) +} + +fn parse_optional_selection_set<'a>(parser: &mut Parser<'a>) -> OptionParseResult<'a, Vec> { + if parser.peek().item == Token::CurlyOpen { + Ok(Some(try!(parse_selection_set(parser)))) + } + else { + Ok(None) + } +} + +fn parse_selection_set<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, Vec> { + parser.unlocated_delimited_nonempty_list( + &Token::CurlyOpen, + parse_selection, + &Token::CurlyClose) +} + +fn parse_selection<'a>(parser: &mut Parser<'a>) -> UnlocatedParseResult<'a, Selection> { + match parser.peek().item { + Token::Ellipsis => parse_fragment(parser), + _ => parse_field(parser).map(Selection::Field), + } +} + +fn parse_fragment<'a>(parser: &mut Parser<'a>) -> UnlocatedParseResult<'a, Selection> { + let Spanning { start: ref start_pos, .. 
} = try!(parser.expect(&Token::Ellipsis)); + + match parser.peek().item { + Token::Name("on") => { + parser.next(); + let name = try!(parser.expect_name()); + let directives = try!(parse_directives(parser)); + let selection_set = try!(parse_selection_set(parser)); + + Ok(Selection::InlineFragment( + Spanning::start_end( + &start_pos.clone(), + &selection_set.end, + InlineFragment { + type_condition: Some(name), + directives: directives.map(|s| s.item), + selection_set: selection_set.item, + }))) + }, + Token::CurlyOpen => { + let selection_set = try!(parse_selection_set(parser)); + + Ok(Selection::InlineFragment( + Spanning::start_end( + &start_pos.clone(), + &selection_set.end, + InlineFragment { + type_condition: None, + directives: None, + selection_set: selection_set.item, + }))) + }, + Token::Name(_) => { + let frag_name = try!(parser.expect_name()); + let directives = try!(parse_directives(parser)); + + Ok(Selection::FragmentSpread( + Spanning::start_end( + &start_pos.clone(), + &directives.as_ref().map_or(&frag_name.end, |s| &s.end).clone(), + FragmentSpread { + name: frag_name, + directives: directives.map(|s| s.item), + }))) + }, + _ => Err(parser.next().map(ParseError::UnexpectedToken)), + } +} + +fn parse_field<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, Field> { + let mut alias = Some(try!(parser.expect_name())); + + let name = if try!(parser.skip(&Token::Colon)).is_some() { + try!(parser.expect_name()) + } + else { + alias.take().unwrap() + }; + + let arguments = try!(parse_arguments(parser)); + let directives = try!(parse_directives(parser)); + let selection_set = try!(parse_optional_selection_set(parser)); + + Ok(Spanning::start_end( + &alias.as_ref().unwrap_or(&name).start.clone(), + &selection_set.as_ref().map(|s| &s.end) + .or_else(|| directives.as_ref().map(|s| &s.end)) + .or_else(|| arguments.as_ref().map(|s| &s.end)) + .unwrap_or(&name.end) + .clone(), + Field { + alias: alias, + name: name, + arguments: arguments, + directives: directives.map(|s| s.item), + selection_set: selection_set.map(|s| s.item), + })) +} + +fn parse_arguments<'a>(parser: &mut Parser<'a>) -> OptionParseResult<'a, Arguments> { + if parser.peek().item != Token::ParenOpen { + Ok(None) + } else { + Ok(Some(try!(parser.delimited_nonempty_list( + &Token::ParenOpen, + parse_argument, + &Token::ParenClose + )).map(|args| Arguments { items: args.into_iter().map(|s| s.item).collect() }))) + } +} + +fn parse_argument<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, (Spanning, Spanning)> { + let name = try!(parser.expect_name()); + try!(parser.expect(&Token::Colon)); + let value = try!(parse_value_literal(parser, false)); + + Ok(Spanning::start_end( + &name.start.clone(), + &value.end.clone(), + (name, value))) +} + +fn parse_operation_type<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, OperationType> { + match parser.peek().item { + Token::Name("query") => Ok(parser.next().map(|_| OperationType::Query)), + Token::Name("mutation") => Ok(parser.next().map(|_| OperationType::Mutation)), + _ => Err(parser.next().map(ParseError::UnexpectedToken)) + } +} + +fn parse_variable_definitions<'a>(parser: &mut Parser<'a>) -> OptionParseResult<'a, VariableDefinitions> { + if parser.peek().item != Token::ParenOpen { + Ok(None) + } + else { + Ok(Some(try!(parser.delimited_nonempty_list( + &Token::ParenOpen, + parse_variable_definition, + &Token::ParenClose + )).map(|defs| VariableDefinitions { items: defs.into_iter().map(|s| s.item).collect() }))) + } +} + +fn parse_variable_definition<'a>(parser: &mut Parser<'a>) -> 
ParseResult<'a, (Spanning, VariableDefinition)> { + let Spanning { start: start_pos, .. } = try!(parser.expect(&Token::Dollar)); + let var_name = try!(parser.expect_name()); + try!(parser.expect(&Token::Colon)); + let var_type = try!(parse_type(parser)); + + let default_value = if try!(parser.skip(&Token::Equals)).is_some() { + Some(try!(parse_value_literal(parser, true))) + } + else { + None + }; + + Ok(Spanning::start_end( + &start_pos, + &default_value.as_ref().map_or(&var_type.end, |s| &s.end).clone(), + ( + Spanning::start_end( + &start_pos, + &var_name.end, + var_name.item + ), + VariableDefinition { + var_type: var_type, + default_value: default_value, + } + ))) +} + +fn parse_directives<'a>(parser: &mut Parser<'a>) -> OptionParseResult<'a, Vec>> { + if parser.peek().item != Token::At { + Ok(None) + } + else { + let mut items = Vec::new(); + while parser.peek().item == Token::At { + items.push(try!(parse_directive(parser))); + } + + Ok(Spanning::spanning(items)) + } +} + +fn parse_directive<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, Directive> { + let Spanning { start: start_pos, .. } = try!(parser.expect(&Token::At)); + let name = try!(parser.expect_name()); + let arguments = try!(parse_arguments(parser)); + + Ok(Spanning::start_end( + &start_pos, + &arguments.as_ref().map_or(&name.end, |s| &s.end).clone(), + Directive { + name: name, + arguments: arguments, + })) +} + +pub fn parse_type<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, Type> { + let parsed_type = if let Some(Spanning { start: start_pos, ..}) = try!(parser.skip(&Token::BracketOpen)) { + let inner_type = try!(parse_type(parser)); + let Spanning { end: end_pos, .. } = try!(parser.expect(&Token::BracketClose)); + Spanning::start_end( + &start_pos, + &end_pos, + Type::List(Box::new(inner_type.item))) + } + else { + try!(parser.expect_name()).map(Type::Named) + }; + + Ok(match *parser.peek() { + Spanning { item: Token::ExclamationMark, .. } => + try!(wrap_non_null(parser, parsed_type)), + _ => parsed_type + }) +} + +fn wrap_non_null<'a>(parser: &mut Parser<'a>, inner: Spanning) -> ParseResult<'a, Type> { + let Spanning { end: end_pos, .. } = try!(parser.expect(&Token::ExclamationMark)); + + let wrapped = match inner.item { + Type::Named(name) => Type::NonNullNamed(name), + Type::List(l) => Type::NonNullList(l), + t => t, + }; + + Ok(Spanning::start_end(&inner.start, &end_pos, wrapped)) +} diff --git a/src/parser/lexer.rs b/src/parser/lexer.rs new file mode 100644 index 00000000..ebbe3869 --- /dev/null +++ b/src/parser/lexer.rs @@ -0,0 +1,522 @@ +use std::char; +use std::str::CharIndices; +use std::iter::{Iterator, Peekable}; +use std::result::Result; +use std::fmt; + +use parser::{SourcePosition, Spanning}; + +#[doc(hidden)] +#[derive(Debug)] +pub struct Lexer<'a> { + iterator: Peekable>, + source: &'a str, + length: usize, + position: SourcePosition, + has_reached_eof: bool, +} + +/// A single token in the input source +#[derive(Debug, PartialEq)] +#[allow(missing_docs)] +pub enum Token<'a> { + Name(&'a str), + Int(i64), + Float(f64), + String(String), + ExclamationMark, + Dollar, + ParenOpen, + ParenClose, + BracketOpen, + BracketClose, + CurlyOpen, + CurlyClose, + Ellipsis, + Colon, + Equals, + At, + Pipe, + EndOfFile, +} + +/// Error when tokenizing the input source +#[derive(Debug, PartialEq, Eq)] +pub enum LexerError { + /// An unknown character was found + /// + /// Unknown characters are characters that do not occur anywhere in the + /// GraphQL language, such as `?` or `%`. 
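    /// For example, tokenizing the single-character source `?` produces
    /// `UnknownCharacter('?')`, as exercised by the lexer tests later in this
    /// commit.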
+ UnknownCharacter(char), + + /// An unexpected character was found + /// + /// Unexpected characters are characters that _do_ exist in the GraphQL + /// language, but is not expected at the current position in the document. + UnexpectedCharacter(char), + + /// An unterminated string literal was found + /// + /// Apart from forgetting the ending `"`, terminating a string within a + /// Unicode escape sequence or having a line break in the string also + /// causes this error. + UnterminatedString, + + /// An unknown character in a string literal was found + /// + /// This occurs when an invalid source character is found in a string + /// literal, such as ASCII control characters. + UnknownCharacterInString(char), + + /// An unknown escape sequence in a string literal was found + /// + /// Only a limited set of escape sequences are supported, this is emitted + /// when e.g. `"\l"` is parsed. + UnknownEscapeSequence(String), + + /// The input source was unexpectedly terminated + /// + /// Emitted when the current token requires a succeeding character, but + /// the source has reached EOF. Emitted when scanning e.g. `"1."`. + UnexpectedEndOfFile, + + /// An invalid number literal was found + InvalidNumber, +} + +pub type LexerResult<'a> = Result>, Spanning>; + +impl<'a> Lexer<'a> { + #[doc(hidden)] + pub fn new(source: &'a str) -> Lexer<'a> { + Lexer { + iterator: source.char_indices().peekable(), + source: source, + length: source.len(), + position: SourcePosition::new_origin(), + has_reached_eof: false, + } + } + + fn peek_char(&mut self) -> Option<(usize, char)> { + assert!(self.position.index() <= self.length); + assert!(!self.has_reached_eof); + + self.iterator.peek().map(|&(idx, ch)| (idx, ch)) + } + + fn next_char(&mut self) -> Option<(usize, char)> { + assert!(self.position.index() <= self.length); + assert!(!self.has_reached_eof); + + let next = self.iterator.next(); + + if let Some((_, ch)) = next { + if ch == '\n' { + self.position.advance_line(); + } + else { + self.position.advance_col(); + } + } + + next + } + + fn emit_single_char(&mut self, t: Token<'a>) -> Spanning> { + assert!(self.position.index() <= self.length); + + let start_pos = self.position.clone(); + + self.next_char() + .expect("Internal error in GraphQL lexer: emit_single_char reached EOF"); + + Spanning::single_width(&start_pos, t) + } + + fn scan_over_whitespace(&mut self) { + while let Some((_, ch)) = self.peek_char() { + if ch == '\t' || ch == ' ' || ch == '\n' || ch == '\r' || ch == ',' { + self.next_char(); + } + else if ch == '#' { + self.next_char(); + + while let Some((_, ch)) = self.peek_char() { + if is_source_char(ch) && (ch == '\n' || ch == '\r') { + self.next_char(); + break; + } + else if is_source_char(ch) { + self.next_char(); + } + else { + break; + } + } + } + else { + break; + } + } + } + + fn scan_ellipsis(&mut self) -> LexerResult<'a> { + let start_pos = self.position.clone(); + + for _ in 0..3 { + let (_, ch) = try!(self.next_char().ok_or(Spanning::zero_width(&self.position, LexerError::UnexpectedEndOfFile))); + if ch != '.' 
{ + return Err(Spanning::zero_width(&start_pos, LexerError::UnexpectedCharacter('.'))); + } + } + + Ok(Spanning::start_end(&start_pos, &self.position, Token::Ellipsis)) + } + + fn scan_name(&mut self) -> LexerResult<'a> { + let start_pos = self.position.clone(); + let (start_idx, start_ch) = try!(self.next_char().ok_or( + Spanning::zero_width(&self.position, LexerError::UnexpectedEndOfFile))); + assert!(is_name_start(start_ch)); + + let mut end_idx = start_idx; + + while let Some((idx, ch)) = self.peek_char() { + if is_name_cont(ch) { + self.next_char(); + end_idx = idx; + } + else { + break; + } + } + + Ok(Spanning::start_end( + &start_pos, + &self.position, + Token::Name(&self.source[start_idx..end_idx + 1]))) + } + + fn scan_string(&mut self) -> LexerResult<'a> { + let start_pos = self.position.clone(); + let (_, start_ch) = try!(self.next_char().ok_or( + Spanning::zero_width(&self.position, LexerError::UnexpectedEndOfFile))); + assert!(start_ch == '"'); + + let mut acc = String::new(); + + while let Some((_, ch)) = self.peek_char() { + if ch == '"' { + self.next_char(); + return Ok(Spanning::start_end( + &start_pos, + &self.position, + Token::String(acc))); + } + else if ch == '\\' { + self.next_char(); + + match self.peek_char() { + Some((_, '"')) => { self.next_char(); acc.push('"'); }, + Some((_, '\\')) => { self.next_char(); acc.push('\\'); }, + Some((_, '/')) => { self.next_char(); acc.push('/'); }, + Some((_, 'b')) => { self.next_char(); acc.push('\u{0008}'); }, + Some((_, 'f')) => { self.next_char(); acc.push('\u{000c}'); }, + Some((_, 'n')) => { self.next_char(); acc.push('\n'); }, + Some((_, 'r')) => { self.next_char(); acc.push('\r'); }, + Some((_, 't')) => { self.next_char(); acc.push('\t'); }, + Some((_, 'u')) => { + let start_pos = self.position.clone(); + self.next_char(); + acc.push(try!(self.scan_escaped_unicode(&start_pos))); + }, + Some((_, ch)) => { + let mut s = String::from("\\"); + s.push(ch); + + return Err(Spanning::zero_width( + &self.position, + LexerError::UnknownEscapeSequence(s))); + }, + None => { + return Err(Spanning::zero_width( + &self.position, + LexerError::UnterminatedString)); + }, + } + if let Some((_, ch)) = self.peek_char() { + if ch == 'n' { + + } + } + else { + return Err(Spanning::zero_width( + &self.position, + LexerError::UnterminatedString)); + } + } + else if ch == '\n' || ch == '\r' { + return Err(Spanning::zero_width( + &self.position, + LexerError::UnterminatedString)); + } + else if !is_source_char(ch) { + return Err(Spanning::zero_width( + &self.position, + LexerError::UnknownCharacterInString(ch))); + } + else { + self.next_char(); + acc.push(ch); + } + } + + Err(Spanning::zero_width(&self.position, LexerError::UnterminatedString)) + } + + fn scan_escaped_unicode(&mut self, start_pos: &SourcePosition) -> Result> { + let (start_idx, _) = try!(self.peek_char().ok_or( + Spanning::zero_width(&self.position, LexerError::UnterminatedString))); + let mut end_idx = start_idx; + let mut len = 0; + + for _ in 0..4 { + let (idx, ch) = try!(self.next_char().ok_or( + Spanning::zero_width(&self.position, LexerError::UnterminatedString))); + + if !ch.is_alphanumeric() { + break; + } + + end_idx = idx; + len += 1; + } + + let escape = &self.source[start_idx..end_idx+1]; + + if len != 4 { + return Err(Spanning::zero_width( + start_pos, + LexerError::UnknownEscapeSequence("\\u".to_owned() + escape))); + } + + let code_point = try!(u32::from_str_radix(escape, 16).map_err(|_| + Spanning::zero_width( + start_pos, + 
LexerError::UnknownEscapeSequence("\\u".to_owned() + escape)))); + + char::from_u32(code_point).ok_or_else(|| + Spanning::zero_width( + start_pos, + LexerError::UnknownEscapeSequence("\\u".to_owned() + escape))) + } + + fn scan_number(&mut self) -> LexerResult<'a> { + let start_pos = self.position.clone(); + let int_part = try!(self.scan_integer_part()); + let mut frac_part = None; + let mut exp_part = None; + + if let Some((_, '.')) = self.peek_char() { + self.next_char(); + + frac_part = Some(try!(self.scan_digits())); + } + + if let Some((_, ch)) = self.peek_char() { + if ch == 'e' || ch == 'E' { + self.next_char(); + + let mut is_negative = false; + + if let Some((_, ch)) = self.peek_char() { + if ch == '-' { + self.next_char(); + is_negative = true; + } + else if ch == '+' { + self.next_char(); + } + } + exp_part = Some(if is_negative { -1 } else { 1 } * try!(self.scan_digits())); + } + } + + let mantissa = frac_part.map(|f| f as f64).map(|frac| + if frac > 0f64 { + frac / 10f64.powf(frac.log10().floor() + 1f64) + } + else { + 0f64 + }).map(|m| if int_part < 0 { -m } else { m }); + + let exp = exp_part.map(|e| e as f64).map(|e| 10f64.powf(e)); + + Ok(Spanning::start_end( + &start_pos, + &self.position, + match (mantissa, exp) { + (None, None) => Token::Int(int_part), + (None, Some(exp)) => Token::Float((int_part as f64) * exp), + (Some(mantissa), None) => Token::Float((int_part as f64) + mantissa), + (Some(mantissa), Some(exp)) => Token::Float(((int_part as f64) + mantissa) * exp), + })) + } + + fn scan_integer_part(&mut self) -> Result> { + let is_negative = { + let (_, init_ch) = try!(self.peek_char().ok_or( + Spanning::zero_width(&self.position, LexerError::UnexpectedEndOfFile))); + + if init_ch == '-' { + self.next_char(); + true + } + else { + false + } + }; + + let (_, ch) = try!(self.peek_char().ok_or( + Spanning::zero_width(&self.position, LexerError::UnexpectedEndOfFile))); + + if ch == '0' { + self.next_char(); + + match self.peek_char() { + Some((_, '0')) => Err(Spanning::zero_width(&self.position, LexerError::UnexpectedCharacter(ch))), + _ => Ok(0), + } + } + else { + Ok(try!(self.scan_digits()) * if is_negative { -1 } else { 1 }) + } + } + + fn scan_digits(&mut self) -> Result> { + let start_pos = self.position.clone(); + let (start_idx, ch) = try!(self.peek_char().ok_or( + Spanning::zero_width(&self.position, LexerError::UnexpectedEndOfFile))); + let mut end_idx = start_idx; + + if !ch.is_digit(10) { + return Err(Spanning::zero_width(&self.position, LexerError::UnexpectedCharacter(ch))); + } + + while let Some((idx, ch)) = self.peek_char() { + if !ch.is_digit(10) { + break; + } + else { + self.next_char(); + end_idx = idx; + } + } + + i64::from_str_radix(&self.source[start_idx..end_idx+1], 10) + .map_err(|_| Spanning::zero_width(&start_pos, LexerError::InvalidNumber)) + } +} + +impl<'a> Iterator for Lexer<'a> { + type Item = LexerResult<'a>; + + fn next(&mut self) -> Option { + if self.has_reached_eof { + return None; + } + + self.scan_over_whitespace(); + + let ch = self.iterator.peek().map(|&(_, ch)| ch); + + Some(match ch { + Some('!') => Ok(self.emit_single_char(Token::ExclamationMark)), + Some('$') => Ok(self.emit_single_char(Token::Dollar)), + Some('(') => Ok(self.emit_single_char(Token::ParenOpen)), + Some(')') => Ok(self.emit_single_char(Token::ParenClose)), + Some('[') => Ok(self.emit_single_char(Token::BracketOpen)), + Some(']') => Ok(self.emit_single_char(Token::BracketClose)), + Some('{') => Ok(self.emit_single_char(Token::CurlyOpen)), + Some('}') => 
Ok(self.emit_single_char(Token::CurlyClose)), + Some(':') => Ok(self.emit_single_char(Token::Colon)), + Some('=') => Ok(self.emit_single_char(Token::Equals)), + Some('@') => Ok(self.emit_single_char(Token::At)), + Some('|') => Ok(self.emit_single_char(Token::Pipe)), + Some('.') => self.scan_ellipsis(), + Some('"') => self.scan_string(), + Some(ch) => { + if is_number_start(ch) { + self.scan_number() + } + else if is_name_start(ch) { + self.scan_name() + } + else { + Err(Spanning::zero_width(&self.position, LexerError::UnknownCharacter(ch))) + } + }, + None => { + self.has_reached_eof = true; + Ok(Spanning::zero_width( + &self.position, Token::EndOfFile)) + }, + }) + } +} + +impl<'a> fmt::Display for Token<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Token::Name(ref name) => write!(f, "{}", name), + Token::Int(i) => write!(f, "{}", i), + Token::Float(v) => write!(f, "{}", v), + Token::String(ref s) => write!(f, "{}", s), + Token::ExclamationMark => write!(f, "!"), + Token::Dollar => write!(f, "$"), + Token::ParenOpen => write!(f, "("), + Token::ParenClose => write!(f, ")"), + Token::BracketOpen => write!(f, "["), + Token::BracketClose => write!(f, "]"), + Token::CurlyOpen => write!(f, "{{"), + Token::CurlyClose => write!(f, "}}"), + Token::Ellipsis => write!(f, "..."), + Token::Colon => write!(f, ":"), + Token::Equals => write!(f, "="), + Token::At => write!(f, "@"), + Token::Pipe => write!(f, "|"), + Token::EndOfFile => write!(f, "End of file"), + } + } +} + +fn is_source_char(c: char) -> bool { + c == '\t' || c == '\n' || c == '\r' || c >= ' ' +} + +fn is_name_start(c: char) -> bool { + c == '_' || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') +} + +fn is_name_cont(c: char) -> bool { + is_name_start(c) || (c >= '0' && c <= '9') +} + +fn is_number_start(c: char) -> bool { + c == '-' || (c >= '0' && c <= '9') +} + +impl fmt::Display for LexerError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + LexerError::UnknownCharacter(c) => write!(f, "Unknown character \"{}\"", c), + LexerError::UnterminatedString => write!(f, "Unterminated string literal"), + LexerError::UnknownCharacterInString(c) => write!(f, "Unknown character \"{}\" in string literal", c), + LexerError::UnknownEscapeSequence(ref s) => write!(f, "Unknown escape sequence \"{}\" in string", s), + LexerError::UnexpectedCharacter(c) => write!(f, "Unexpected character \"{}\"", c), + LexerError::UnexpectedEndOfFile => write!(f, "Unexpected end of input"), + LexerError::InvalidNumber => write!(f, "Invalid number literal"), + } + } +} diff --git a/src/parser/mod.rs b/src/parser/mod.rs new file mode 100644 index 00000000..73a577bd --- /dev/null +++ b/src/parser/mod.rs @@ -0,0 +1,16 @@ +//! 
Query parser and language utilities + +mod utils; +mod lexer; +mod parser; +mod value; +mod document; + +#[cfg(test)] +mod tests; + +pub use self::document::parse_document_source; + +pub use self::parser::{Parser, ParseError, ParseResult, UnlocatedParseResult, OptionParseResult}; +pub use self::lexer::{Token, Lexer, LexerError}; +pub use self::utils::{Spanning, SourcePosition}; diff --git a/src/parser/parser.rs b/src/parser/parser.rs new file mode 100644 index 00000000..9e05cd21 --- /dev/null +++ b/src/parser/parser.rs @@ -0,0 +1,179 @@ +use std::result::Result; +use std::fmt; + +use parser::{Spanning, Token, LexerError, Lexer}; + +/// Error while parsing a GraphQL query +#[derive(Debug, PartialEq)] +pub enum ParseError<'a> { + /// An unexpected token occurred in the source + UnexpectedToken(Token<'a>), + + /// The input source abruptly ended + UnexpectedEndOfFile, + + /// An error during tokenization occurred + LexerError(LexerError), +} + +#[doc(hidden)] +pub type ParseResult<'a, T> = Result, Spanning>>; + +#[doc(hidden)] +pub type UnlocatedParseResult<'a, T> = Result>>; + +#[doc(hidden)] +pub type OptionParseResult<'a, T> = Result>, Spanning>>; + +#[doc(hidden)] +#[derive(Debug)] +pub struct Parser<'a> { + tokens: Vec>>, +} + +impl<'a> Parser<'a> { + #[doc(hidden)] + pub fn new(lexer: &mut Lexer<'a>) -> Result, Spanning> { + let mut tokens = Vec::new(); + + for res in lexer { + match res { + Ok(s) => tokens.push(s), + Err(e) => return Err(e), + } + } + + Ok(Parser { + tokens: tokens, + }) + } + + #[doc(hidden)] + pub fn peek(&self) -> &Spanning> { + &self.tokens[0] + } + + #[doc(hidden)] + pub fn next(&mut self) -> Spanning> { + if self.tokens.len() == 1 { + panic!("Can not parse over EOF marker"); + } + + self.tokens.remove(0) + } + + #[doc(hidden)] + pub fn expect(&mut self, expected: &Token) -> ParseResult<'a, Token<'a>> { + if &self.peek().item != expected { + Err(self.next().map(ParseError::UnexpectedToken)) + } + else { + Ok(self.next()) + } + } + + #[doc(hidden)] + pub fn skip(&mut self, expected: &Token) -> Result>>, Spanning>> { + if &self.peek().item == expected { + Ok(Some(self.next())) + } + else if self.peek().item == Token::EndOfFile { + Err(Spanning::zero_width( + &self.peek().start, + ParseError::UnexpectedEndOfFile)) + } + else { + Ok(None) + } + } + + #[doc(hidden)] + pub fn delimited_list(&mut self, opening: &Token, parser: F, closing: &Token) + -> ParseResult<'a, Vec>> + where T: fmt::Debug, F: Fn(&mut Parser<'a>) -> ParseResult<'a, T> + { + let Spanning { start: start_pos, .. } = try!(self.expect(opening)); + let mut items = Vec::new(); + + loop { + if let Some(Spanning { end: end_pos, .. }) = try!(self.skip(closing)) { + return Ok(Spanning::start_end( + &start_pos, + &end_pos, + items)); + } + + items.push(try!(parser(self))); + } + } + + #[doc(hidden)] + pub fn delimited_nonempty_list(&mut self, opening: &Token, parser: F, closing: &Token) + -> ParseResult<'a, Vec>> + where T: fmt::Debug, F: Fn(&mut Parser<'a>) -> ParseResult<'a, T> + { + let Spanning { start: start_pos, .. } = try!(self.expect(opening)); + let mut items = Vec::new(); + + loop { + items.push(try!(parser(self))); + + if let Some(Spanning { end: end_pos, .. 
}) = try!(self.skip(closing)) { + return Ok(Spanning::start_end( + &start_pos, + &end_pos, + items)); + } + } + } + + #[doc(hidden)] + pub fn unlocated_delimited_nonempty_list(&mut self, opening: &Token, parser: F, closing: &Token) + -> ParseResult<'a, Vec> + where T: fmt::Debug, F: Fn(&mut Parser<'a>) -> UnlocatedParseResult<'a, T> + { + let Spanning { start: start_pos, .. } = try!(self.expect(opening)); + let mut items = Vec::new(); + + loop { + items.push(try!(parser(self))); + + if let Some(Spanning { end: end_pos, .. }) = try!(self.skip(closing)) { + return Ok(Spanning::start_end( + &start_pos, + &end_pos, + items)); + } + } + } + + #[doc(hidden)] + pub fn expect_name(&mut self) -> ParseResult<'a, String> { + match *self.peek() { + Spanning { item: Token::Name(_), .. } => + Ok(self.next().map(|token| + if let Token::Name(name) = token { + name.to_owned() + } + else { + panic!("Internal parse error in `expect_name`"); + })), + Spanning { item: Token::EndOfFile, .. } => + Err(Spanning::start_end( + &self.peek().start.clone(), + &self.peek().end.clone(), + ParseError::UnexpectedEndOfFile)), + _ => Err(self.next().map(ParseError::UnexpectedToken)), + } + } +} + +impl<'a> fmt::Display for ParseError<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + ParseError::UnexpectedToken(ref token) => write!(f, "Unexpected \"{}\"", token), + ParseError::UnexpectedEndOfFile => write!(f, "Unexpected end of input"), + ParseError::LexerError(ref err) => err.fmt(f), + } + } +} diff --git a/src/parser/tests/document.rs b/src/parser/tests/document.rs new file mode 100644 index 00000000..3908a69d --- /dev/null +++ b/src/parser/tests/document.rs @@ -0,0 +1,123 @@ +use ast::{Definition, Operation, Document, OperationType, Field, Selection, InputValue, Arguments}; +use parser::{Spanning, SourcePosition, ParseError, Token}; +use parser::document::parse_document_source; + +fn parse_document(s: &str) -> Document { + parse_document_source(s) + .expect(&format!("Parse error on input {:#?}", s)) +} + +fn parse_document_error<'a>(s: &'a str) -> Spanning> { + match parse_document_source(s) { + Ok(doc) => panic!("*No* parse error on input {:#?} =>\n{:#?}", s, doc), + Err(err) => err, + } +} + +#[test] +fn simple_ast() { + assert_eq!( + parse_document(r#" + { + node(id: 4) { + id + name + } + } + "#), + vec![ + Definition::Operation(Spanning::start_end( + &SourcePosition::new(13, 1, 12), + &SourcePosition::new(124, 6, 13), + Operation { + operation_type: OperationType::Query, + name: None, + variable_definitions: None, + directives: None, + selection_set: vec![ + Selection::Field( + Spanning::start_end( + &SourcePosition::new(31, 2, 16), + &SourcePosition::new(110, 5, 17), + Field { + alias: None, + name: Spanning::start_end( + &SourcePosition::new(31, 2, 16), + &SourcePosition::new(35, 2, 20), + "node".to_owned()), + arguments: Some(Spanning::start_end( + &SourcePosition::new(35, 2, 20), + &SourcePosition::new(42, 2, 27), + Arguments { + items: vec![ + ( + Spanning::start_end( + &SourcePosition::new(36, 2, 21), + &SourcePosition::new(38, 2, 23), + "id".to_owned()), + Spanning::start_end( + &SourcePosition::new(40, 2, 25), + &SourcePosition::new(41, 2, 26), + InputValue::int(4)) + ), + ] + })), + directives: None, + selection_set: Some(vec![ + Selection::Field( + Spanning::start_end( + &SourcePosition::new(65, 3, 20), + &SourcePosition::new(67, 3, 22), + Field { + alias: None, + name: Spanning::start_end( + &SourcePosition::new(65, 3, 20), + &SourcePosition::new(67, 3, 22), + 
"id".to_owned()), + arguments: None, + directives: None, + selection_set: None, + })), + Selection::Field( + Spanning::start_end( + &SourcePosition::new(88, 4, 20), + &SourcePosition::new(92, 4, 24), + Field { + alias: None, + name: Spanning::start_end( + &SourcePosition::new(88, 4, 20), + &SourcePosition::new(92, 4, 24), + "name".to_owned()), + arguments: None, + directives: None, + selection_set: None, + })), + ]), + })) + ] + })) + ]) +} + +#[test] +fn errors() { + assert_eq!( + parse_document_error("{"), + Spanning::zero_width( + &SourcePosition::new(1, 0, 1), + ParseError::UnexpectedEndOfFile)); + + assert_eq!( + parse_document_error("{ ...MissingOn }\nfragment MissingOn Type"), + Spanning::start_end( + &SourcePosition::new(36, 1, 19), + &SourcePosition::new(40, 1, 23), + ParseError::UnexpectedToken(Token::Name("Type")))); + + assert_eq!( + parse_document_error("{ ...on }"), + Spanning::start_end( + &SourcePosition::new(8, 0, 8), + &SourcePosition::new(9, 0, 9), + ParseError::UnexpectedToken(Token::CurlyClose))); +} diff --git a/src/parser/tests/lexer.rs b/src/parser/tests/lexer.rs new file mode 100644 index 00000000..934f59cc --- /dev/null +++ b/src/parser/tests/lexer.rs @@ -0,0 +1,536 @@ +use parser::{Lexer, SourcePosition, Spanning, Token, LexerError}; + +fn tokenize_to_vec<'a>(s: &'a str) -> Vec>> { + let mut tokens = Vec::new(); + let mut lexer = Lexer::new(s); + + loop { + match lexer.next() { + Some(Ok(t)) => { + let at_eof = t.item == Token::EndOfFile; + tokens.push(t); + if at_eof { + break; + } + }, + Some(Err(e)) => panic!("Error in input stream: {:#?} for {:#?}", e, s), + None => panic!("EOF before EndOfFile token in {:#?}", s), + } + } + + tokens +} + +fn tokenize_single<'a>(s: &'a str) -> Spanning> { + let mut tokens = tokenize_to_vec(s); + + assert_eq!(tokens.len(), 2); + assert_eq!(tokens[1].item, Token::EndOfFile); + + tokens.remove(0) +} + +fn tokenize_error(s: &str) -> Spanning { + let mut lexer = Lexer::new(s); + + loop { + match lexer.next() { + Some(Ok(t)) => { + if t.item == Token::EndOfFile { + panic!("Tokenizer did not return error for {:#?}", s); + } + }, + Some(Err(e)) => { + return e; + }, + None => panic!("Tokenizer did not return error for {:#?}", s), + } + } +} + +#[test] +fn empty_source() { + assert_eq!( + tokenize_to_vec(""), + vec![ + Spanning::zero_width(&SourcePosition::new_origin(), Token::EndOfFile), + ]); +} + +#[test] +fn disallow_control_codes() { + assert_eq!( + Lexer::new("\u{0007}").next(), + Some(Err(Spanning::zero_width( + &SourcePosition::new_origin(), + LexerError::UnknownCharacter('\u{0007}'))))); +} + +#[test] +fn skip_whitespace() { + assert_eq!( + tokenize_to_vec(r#" + + foo + + "#), + vec![ + Spanning::start_end( + &SourcePosition::new(14, 2, 12), + &SourcePosition::new(17, 2, 15), + Token::Name("foo"), + ), + Spanning::zero_width( + &SourcePosition::new(31, 4, 12), + Token::EndOfFile), + ]); +} + +#[test] +fn skip_comments() { + assert_eq!( + tokenize_to_vec(r#" + #comment + foo#comment + "#), + vec![ + Spanning::start_end( + &SourcePosition::new(34, 2, 12), + &SourcePosition::new(37, 2, 15), + Token::Name("foo"), + ), + Spanning::zero_width( + &SourcePosition::new(58, 3, 12), + Token::EndOfFile), + ]); +} + +#[test] +fn skip_commas() { + assert_eq!( + tokenize_to_vec(r#",,,foo,,,"#), + vec![ + Spanning::start_end( + &SourcePosition::new(3, 0, 3), + &SourcePosition::new(6, 0, 6), + Token::Name("foo"), + ), + Spanning::zero_width( + &SourcePosition::new(9, 0, 9), + Token::EndOfFile), + ]); +} + +#[test] +fn error_positions() { + 
assert_eq!( + Lexer::new(r#" + + ? + + "#).next(), + Some(Err(Spanning::zero_width( + &SourcePosition::new(14, 2, 12), + LexerError::UnknownCharacter('?'))))); +} + +#[test] +fn strings() { + assert_eq!( + tokenize_single(r#""simple""#), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(8, 0, 8), + Token::String("simple".to_owned()))); + + assert_eq!( + tokenize_single(r#"" white space ""#), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(15, 0, 15), + Token::String(" white space ".to_owned()))); + + assert_eq!( + tokenize_single(r#""quote \"""#), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(10, 0, 10), + Token::String("quote \"".to_owned()))); + + assert_eq!( + tokenize_single(r#""escaped \n\r\b\t\f""#), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(20, 0, 20), + Token::String("escaped \n\r\u{0008}\t\u{000c}".to_owned()))); + + assert_eq!( + tokenize_single(r#""slashes \\ \/""#), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(15, 0, 15), + Token::String("slashes \\ /".to_owned()))); + + assert_eq!( + tokenize_single(r#""unicode \u1234\u5678\u90AB\uCDEF""#), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(34, 0, 34), + Token::String("unicode \u{1234}\u{5678}\u{90ab}\u{cdef}".to_owned()))); +} + +#[test] +fn string_errors() { + assert_eq!( + tokenize_error("\""), + Spanning::zero_width( + &SourcePosition::new(1, 0, 1), + LexerError::UnterminatedString)); + + assert_eq!( + tokenize_error("\"no end quote"), + Spanning::zero_width( + &SourcePosition::new(13, 0, 13), + LexerError::UnterminatedString)); + + assert_eq!( + tokenize_error("\"contains unescaped \u{0007} control char\""), + Spanning::zero_width( + &SourcePosition::new(20, 0, 20), + LexerError::UnknownCharacterInString('\u{0007}'))); + + assert_eq!( + tokenize_error("\"null-byte is not \u{0000} end of file\""), + Spanning::zero_width( + &SourcePosition::new(18, 0, 18), + LexerError::UnknownCharacterInString('\u{0000}'))); + + assert_eq!( + tokenize_error("\"multi\nline\""), + Spanning::zero_width( + &SourcePosition::new(6, 0, 6), + LexerError::UnterminatedString)); + + assert_eq!( + tokenize_error("\"multi\rline\""), + Spanning::zero_width( + &SourcePosition::new(6, 0, 6), + LexerError::UnterminatedString)); + + assert_eq!( + tokenize_error(r#""bad \z esc""#), + Spanning::zero_width( + &SourcePosition::new(6, 0, 6), + LexerError::UnknownEscapeSequence("\\z".to_owned()))); + + assert_eq!( + tokenize_error(r#""bad \x esc""#), + Spanning::zero_width( + &SourcePosition::new(6, 0, 6), + LexerError::UnknownEscapeSequence("\\x".to_owned()))); + + assert_eq!( + tokenize_error(r#""bad \u1 esc""#), + Spanning::zero_width( + &SourcePosition::new(6, 0, 6), + LexerError::UnknownEscapeSequence("\\u1".to_owned()))); + + assert_eq!( + tokenize_error(r#""bad \u0XX1 esc""#), + Spanning::zero_width( + &SourcePosition::new(6, 0, 6), + LexerError::UnknownEscapeSequence("\\u0XX1".to_owned()))); + + assert_eq!( + tokenize_error(r#""bad \uXXXX esc""#), + Spanning::zero_width( + &SourcePosition::new(6, 0, 6), + LexerError::UnknownEscapeSequence("\\uXXXX".to_owned()))); + + assert_eq!( + tokenize_error(r#""bad \uFXXX esc""#), + Spanning::zero_width( + &SourcePosition::new(6, 0, 6), + LexerError::UnknownEscapeSequence("\\uFXXX".to_owned()))); + + assert_eq!( + tokenize_error(r#""bad \uXXXF esc""#), + Spanning::zero_width( + &SourcePosition::new(6, 0, 6), + 
LexerError::UnknownEscapeSequence("\\uXXXF".to_owned()))); +} + +#[test] +fn numbers() { + assert_eq!( + tokenize_single("4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(1, 0, 1), + Token::Int(4))); + + assert_eq!( + tokenize_single("4.123"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(5, 0, 5), + Token::Float(4.123))); + + assert_eq!( + tokenize_single("-4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(2, 0, 2), + Token::Int(-4))); + + assert_eq!( + tokenize_single("9"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(1, 0, 1), + Token::Int(9))); + + assert_eq!( + tokenize_single("0"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(1, 0, 1), + Token::Int(0))); + + assert_eq!( + tokenize_single("-4.123"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(6, 0, 6), + Token::Float(-4.123))); + + assert_eq!( + tokenize_single("0.123"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(5, 0, 5), + Token::Float(0.123))); + + assert_eq!( + tokenize_single("123e4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(5, 0, 5), + Token::Float(123e4))); + + assert_eq!( + tokenize_single("123E4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(5, 0, 5), + Token::Float(123e4))); + + assert_eq!( + tokenize_single("123e-4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(6, 0, 6), + Token::Float(123e-4))); + + assert_eq!( + tokenize_single("123e+4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(6, 0, 6), + Token::Float(123e4))); + + assert_eq!( + tokenize_single("-1.123e4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(8, 0, 8), + Token::Float(-1.123e4))); + + assert_eq!( + tokenize_single("-1.123E4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(8, 0, 8), + Token::Float(-1.123e4))); + + assert_eq!( + tokenize_single("-1.123e-4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(9, 0, 9), + Token::Float(-1.123e-4))); + + assert_eq!( + tokenize_single("-1.123e+4"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(9, 0, 9), + Token::Float(-1.123e4))); + + assert_eq!( + tokenize_single("-1.123e45"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(9, 0, 9), + Token::Float(-1.123e45))); +} + +#[test] +fn numbers_errors() { + assert_eq!( + tokenize_error("00"), + Spanning::zero_width( + &SourcePosition::new(1, 0, 1), + LexerError::UnexpectedCharacter('0'))); + + assert_eq!( + tokenize_error("+1"), + Spanning::zero_width( + &SourcePosition::new(0, 0, 0), + LexerError::UnknownCharacter('+'))); + + assert_eq!( + tokenize_error("1."), + Spanning::zero_width( + &SourcePosition::new(2, 0, 2), + LexerError::UnexpectedEndOfFile)); + + assert_eq!( + tokenize_error(".123"), + Spanning::zero_width( + &SourcePosition::new(0, 0, 0), + LexerError::UnexpectedCharacter('.'))); + + assert_eq!( + tokenize_error("1.A"), + Spanning::zero_width( + &SourcePosition::new(2, 0, 2), + LexerError::UnexpectedCharacter('A'))); + + assert_eq!( + tokenize_error("-A"), + Spanning::zero_width( + &SourcePosition::new(1, 0, 1), + LexerError::UnexpectedCharacter('A'))); + + assert_eq!( + tokenize_error("1.0e"), + Spanning::zero_width( + 
&SourcePosition::new(4, 0, 4), + LexerError::UnexpectedEndOfFile)); + + assert_eq!( + tokenize_error("1.0eA"), + Spanning::zero_width( + &SourcePosition::new(4, 0, 4), + LexerError::UnexpectedCharacter('A'))); +} + +#[test] +fn punctuation() { + assert_eq!( + tokenize_single("!"), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::ExclamationMark)); + + assert_eq!( + tokenize_single("$"), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::Dollar)); + + assert_eq!( + tokenize_single("("), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::ParenOpen)); + + assert_eq!( + tokenize_single(")"), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::ParenClose)); + + assert_eq!( + tokenize_single("..."), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(3, 0, 3), + Token::Ellipsis)); + + assert_eq!( + tokenize_single(":"), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::Colon)); + + assert_eq!( + tokenize_single("="), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::Equals)); + + assert_eq!( + tokenize_single("@"), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::At)); + + assert_eq!( + tokenize_single("["), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::BracketOpen)); + + assert_eq!( + tokenize_single("]"), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::BracketClose)); + + assert_eq!( + tokenize_single("{"), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::CurlyOpen)); + + assert_eq!( + tokenize_single("}"), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::CurlyClose)); + + assert_eq!( + tokenize_single("|"), + Spanning::single_width( + &SourcePosition::new(0, 0, 0), + Token::Pipe)); +} + +#[test] +fn punctuation_error() { + assert_eq!( + tokenize_error(".."), + Spanning::zero_width( + &SourcePosition::new(2, 0, 2), + LexerError::UnexpectedEndOfFile)); + + assert_eq!( + tokenize_error("?"), + Spanning::zero_width( + &SourcePosition::new(0, 0, 0), + LexerError::UnknownCharacter('?'))); + + assert_eq!( + tokenize_error("\u{203b}"), + Spanning::zero_width( + &SourcePosition::new(0, 0, 0), + LexerError::UnknownCharacter('\u{203b}'))); + + assert_eq!( + tokenize_error("\u{200b}"), + Spanning::zero_width( + &SourcePosition::new(0, 0, 0), + LexerError::UnknownCharacter('\u{200b}'))); +} diff --git a/src/parser/tests/mod.rs b/src/parser/tests/mod.rs new file mode 100644 index 00000000..18df2c92 --- /dev/null +++ b/src/parser/tests/mod.rs @@ -0,0 +1,3 @@ +mod document; +mod lexer; +mod value; diff --git a/src/parser/tests/value.rs b/src/parser/tests/value.rs new file mode 100644 index 00000000..820b6a39 --- /dev/null +++ b/src/parser/tests/value.rs @@ -0,0 +1,134 @@ +use std::collections::HashMap; + +use ast::InputValue; +use parser::{Lexer, Spanning, SourcePosition, Parser}; +use parser::value::parse_value_literal; + +fn parse_value(s: &str) -> Spanning { + let mut lexer = Lexer::new(s); + let mut parser = Parser::new(&mut lexer) + .expect(&format!("Lexer error on input {:#?}", s)); + + parse_value_literal(&mut parser, false) + .expect(&format!("Parse error on input {:#?}", s)) +} + +#[test] +fn input_value_literals() { + assert_eq!( + parse_value("123"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(3, 0, 3), + InputValue::int(123))); + assert_eq!( + parse_value("123.45"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + 
&SourcePosition::new(6, 0, 6), + InputValue::float(123.45))); + assert_eq!( + parse_value("true"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(4, 0, 4), + InputValue::boolean(true))); + assert_eq!( + parse_value("false"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(5, 0, 5), + InputValue::boolean(false))); + assert_eq!( + parse_value(r#""test""#), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(6, 0, 6), + InputValue::string("test"))); + assert_eq!( + parse_value("enum_value"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(10, 0, 10), + InputValue::enum_value("enum_value"))); + assert_eq!( + parse_value("$variable"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(9, 0, 9), + InputValue::variable("variable"))); + assert_eq!( + parse_value("[]"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(2, 0, 2), + InputValue::list(vec![]))); + assert_eq!( + parse_value("[1, [2, 3]]"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(11, 0, 11), + InputValue::parsed_list(vec![ + Spanning::start_end( + &SourcePosition::new(1, 0, 1), + &SourcePosition::new(2, 0, 2), + InputValue::int(1)), + Spanning::start_end( + &SourcePosition::new(4, 0, 4), + &SourcePosition::new(10, 0, 10), + InputValue::parsed_list(vec![ + Spanning::start_end( + &SourcePosition::new(5, 0, 5), + &SourcePosition::new(6, 0, 6), + InputValue::int(2)), + Spanning::start_end( + &SourcePosition::new(8, 0, 8), + &SourcePosition::new(9, 0, 9), + InputValue::int(3)), + ])), + ]))); + assert_eq!( + parse_value("{}"), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(2, 0, 2), + InputValue::object(HashMap::::new()))); + assert_eq!( + parse_value(r#"{key: 123, other: {foo: "bar"}}"#), + Spanning::start_end( + &SourcePosition::new(0, 0, 0), + &SourcePosition::new(31, 0, 31), + InputValue::parsed_object(vec![ + ( + Spanning::start_end( + &SourcePosition::new(1, 0, 1), + &SourcePosition::new(4, 0, 4), + "key".to_owned()), + Spanning::start_end( + &SourcePosition::new(6, 0, 6), + &SourcePosition::new(9, 0, 9), + InputValue::int(123)) + ), + ( + Spanning::start_end( + &SourcePosition::new(11, 0, 11), + &SourcePosition::new(16, 0, 16), + "other".to_owned()), + Spanning::start_end( + &SourcePosition::new(18, 0, 18), + &SourcePosition::new(30, 0, 30), + InputValue::parsed_object(vec![ + ( + Spanning::start_end( + &SourcePosition::new(19, 0, 19), + &SourcePosition::new(22, 0, 22), + "foo".to_owned()), + Spanning::start_end( + &SourcePosition::new(24, 0, 24), + &SourcePosition::new(29, 0, 29), + InputValue::string("bar")) + ) + ])) + ) + ]))); +} diff --git a/src/parser/utils.rs b/src/parser/utils.rs new file mode 100644 index 00000000..9d2d009b --- /dev/null +++ b/src/parser/utils.rs @@ -0,0 +1,177 @@ +use std::fmt; +use std::hash::{Hash, Hasher}; + +/// A reference to a line and column in an input source file +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct SourcePosition { + index: usize, + line: usize, + col: usize, +} + +/// Data structure used to wrap items with start and end markers in the input source +/// +/// A "span" is a range of characters in the input source, starting at the +/// character pointed by the `start` field and ending just before the `end` +/// marker. 
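/// For example, in the source `{ id }`, the `Spanning` wrapping the name
/// token `id` starts at index 2 and ends at index 4: the end position is
/// exclusive, pointing at the first source position after the item.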
+#[derive(Debug)] +pub struct Spanning { + /// The wrapped item + pub item: T, + + /// Start position of the item + pub start: SourcePosition, + + /// End position of the item + /// + /// This points to the first source position _after_ the wrapped item. + pub end: SourcePosition, +} + +impl Spanning { + #[doc(hidden)] + pub fn zero_width(pos: &SourcePosition, item: T) -> Spanning { + Spanning { + item: item, + start: pos.clone(), + end: pos.clone(), + } + } + + #[doc(hidden)] + pub fn single_width(pos: &SourcePosition, item: T) -> Spanning { + let mut end = pos.clone(); + end.advance_col(); + + Spanning { + item: item, + start: pos.clone(), + end: end, + } + } + + #[doc(hidden)] + pub fn start_end(start: &SourcePosition, end: &SourcePosition, item: T) -> Spanning { + Spanning { + item: item, + start: start.clone(), + end: end.clone(), + } + } + + #[doc(hidden)] + pub fn spanning(v: Vec>) -> Option>>> { + if let (Some(start), Some(end)) = (v.first().map(|s| s.start.clone()), v.last().map(|s| s.end.clone())) { + Some(Spanning { + item: v, + start: start, + end: end, + }) + } + else { + None + } + } + + #[doc(hidden)] + pub fn unlocated(item: T) -> Spanning { + Spanning { + item: item, + start: SourcePosition::new_origin(), + end: SourcePosition::new_origin(), + } + } + + /// Modify the contents of the spanned item + pub fn map O>(self, f: F) -> Spanning { + Spanning { + item: f(self.item), + start: self.start.clone(), + end: self.end.clone(), + } + } +} + +impl Clone for Spanning where T: Clone + fmt::Debug { + fn clone(&self) -> Self { + Spanning { + start: self.start.clone(), + end: self.end.clone(), + item: self.item.clone(), + } + } +} + +impl PartialEq for Spanning where T: PartialEq + fmt::Debug { + fn eq(&self, other: &Self) -> bool { + self.start == other.start && self.end == other.end && self.item == other.item + } +} + +impl Eq for Spanning where T: Eq + fmt::Debug { +} + +impl Hash for Spanning where T: Hash + fmt::Debug { + fn hash(&self, state: &mut H) { + self.start.hash(state); + self.end.hash(state); + self.item.hash(state); + } +} + +impl SourcePosition { + #[doc(hidden)] + pub fn new(index: usize, line: usize, col: usize) -> SourcePosition { + assert!(index >= line + col); + + SourcePosition { + index: index, + line: line, + col: col, + } + } + + #[doc(hidden)] + pub fn new_origin() -> SourcePosition { + SourcePosition { + index: 0, + line: 0, + col: 0, + } + } + + #[doc(hidden)] + pub fn advance_col(&mut self) { + self.index += 1; + self.col += 1; + } + + #[doc(hidden)] + pub fn advance_line(&mut self) { + self.index += 1; + self.line += 1; + self.col = 0; + } + + /// The index of the character in the input source + /// + /// Zero-based index. Take a substring of the original source starting at + /// this index to access the item pointed to by this `SourcePosition`. + pub fn index(&self) -> usize { + self.index + } + + /// The line of the character in the input source + /// + /// Zero-based index: the first line is line zero. + pub fn line(&self) -> usize { + self.line + } + + /// The column of the character in the input source + /// + /// Zero-based index: the first column is column zero. 
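The `advance_col` and `advance_line` methods above are how a lexer is expected to keep the three counters consistent: `index` grows by one per consumed character, while `col` resets on newlines. A quick sketch of that bookkeeping, assuming access to these crate-internal, `#[doc(hidden)]` helpers:

```rust
use parser::SourcePosition;

#[test]
fn source_position_bookkeeping() {
    let mut pos = SourcePosition::new_origin();          // index 0, line 0, col 0

    pos.advance_col();                                   // consumed one character
    assert_eq!((pos.index(), pos.line(), pos.column()), (1, 0, 1));

    pos.advance_line();                                  // consumed a newline
    assert_eq!((pos.index(), pos.line(), pos.column()), (2, 1, 0));

    pos.advance_col();                                   // first char of line 1
    assert_eq!((pos.index(), pos.line(), pos.column()), (3, 1, 1));
}
```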
+ pub fn column(&self) -> usize { + self.col + } +} diff --git a/src/parser/value.rs b/src/parser/value.rs new file mode 100644 index 00000000..0c1cd086 --- /dev/null +++ b/src/parser/value.rs @@ -0,0 +1,71 @@ +use ast::InputValue; + +use parser::{Parser, ParseResult, ParseError, Token, Spanning}; + +pub fn parse_value_literal<'a>(parser: &mut Parser<'a>, is_const: bool) -> ParseResult<'a, InputValue> { + match *parser.peek() { + Spanning { item: Token::BracketOpen, .. } => parse_list_literal(parser, is_const), + Spanning { item: Token::CurlyOpen, .. } => parse_object_literal(parser, is_const), + Spanning { item: Token::Dollar, .. } if !is_const => parse_variable_literal(parser), + Spanning { item: Token::Int(i), .. } => + Ok(parser.next().map(|_| InputValue::int(i))), + Spanning { item: Token::Float(f), .. } => + Ok(parser.next().map(|_| InputValue::float(f))), + Spanning { item: Token::String(_), .. } => + Ok(parser.next().map(|t| + if let Token::String(s) = t { + InputValue::string(s) + } + else { + panic!("Internal parser error"); + })), + Spanning { item: Token::Name("true"), .. } => + Ok(parser.next().map(|_| InputValue::boolean(true))), + Spanning { item: Token::Name("false"), .. } => + Ok(parser.next().map(|_| InputValue::boolean(false))), + Spanning { item: Token::Name("null"), .. } => + Err(parser.next().map(ParseError::UnexpectedToken)), + Spanning { item: Token::Name(name), .. } => + Ok(parser.next().map(|_| InputValue::enum_value(name.to_owned()))), + _ => Err(parser.next().map(ParseError::UnexpectedToken)), + } +} + +fn parse_list_literal<'a>(parser: &mut Parser<'a>, is_const: bool) -> ParseResult<'a, InputValue> { + Ok(try!(parser.delimited_list( + &Token::BracketOpen, + |p| parse_value_literal(p, is_const), + &Token::BracketClose + )).map(|items| InputValue::parsed_list(items))) +} + +fn parse_object_literal<'a>(parser: &mut Parser<'a>, is_const: bool) -> ParseResult<'a, InputValue> { + Ok(try!(parser.delimited_list( + &Token::CurlyOpen, + |p| parse_object_field(p, is_const), + &Token::CurlyClose + )).map(|items| InputValue::parsed_object(items.into_iter().map(|s| s.item).collect()))) +} + +fn parse_object_field<'a>(parser: &mut Parser<'a>, is_const: bool) -> ParseResult<'a, (Spanning, Spanning)> { + let key = try!(parser.expect_name()); + + try!(parser.expect(&Token::Colon)); + + let value = try!(parse_value_literal(parser, is_const)); + + Ok(Spanning::start_end( + &key.start.clone(), + &value.end.clone(), + (key, value))) +} + +fn parse_variable_literal<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, InputValue> { + let Spanning { start: start_pos, .. } = try!(parser.expect(&Token::Dollar)); + let Spanning { item: name, end: end_pos, ..} = try!(parser.expect_name()); + + Ok(Spanning::start_end( + &start_pos, + &end_pos, + InputValue::variable(name))) +} diff --git a/src/schema/meta.rs b/src/schema/meta.rs new file mode 100644 index 00000000..111df60c --- /dev/null +++ b/src/schema/meta.rs @@ -0,0 +1,631 @@ +//! 
Types used to describe a GraphQL schema + + +use std::fmt; + +use ast::{InputValue, FromInputValue, Type}; +use types::base::TypeKind; + +/// Scalar type metadata +pub struct ScalarMeta { + #[doc(hidden)] + pub name: String, + #[doc(hidden)] + pub description: Option, + #[doc(hidden)] + pub try_parse_fn: Box bool + Send + Sync>, +} + +/// List type metadata +#[derive(Debug)] +pub struct ListMeta { + #[doc(hidden)] + pub of_type: Type, +} + +/// Nullable type metadata +#[derive(Debug)] +pub struct NullableMeta { + #[doc(hidden)] + pub of_type: Type, +} + +/// Object type metadata +#[derive(Debug)] +pub struct ObjectMeta { + #[doc(hidden)] + pub name: String, + #[doc(hidden)] + pub description: Option, + #[doc(hidden)] + pub fields: Vec, + #[doc(hidden)] + pub interface_names: Vec, +} + +/// Enum type metadata +pub struct EnumMeta { + #[doc(hidden)] + pub name: String, + #[doc(hidden)] + pub description: Option, + #[doc(hidden)] + pub values: Vec, + #[doc(hidden)] + pub try_parse_fn: Box bool + Send + Sync>, +} + +/// Interface type metadata +#[derive(Debug)] +pub struct InterfaceMeta { + #[doc(hidden)] + pub name: String, + #[doc(hidden)] + pub description: Option, + #[doc(hidden)] + pub fields: Vec, +} + +/// Union type metadata +#[derive(Debug)] +pub struct UnionMeta { + #[doc(hidden)] + pub name: String, + #[doc(hidden)] + pub description: Option, + #[doc(hidden)] + pub of_type_names: Vec, +} + +/// Input object metadata +pub struct InputObjectMeta { + #[doc(hidden)] + pub name: String, + #[doc(hidden)] + pub description: Option, + #[doc(hidden)] + pub input_fields: Vec, + #[doc(hidden)] + pub try_parse_fn: Box bool + Send + Sync>, +} + +/// A placeholder for not-yet-registered types +/// +/// After a type's `meta` method has been called but before it has returned, a placeholder type +/// is inserted into a registry to indicate existence. +#[derive(Debug)] +pub struct PlaceholderMeta { + #[doc(hidden)] + pub of_type: Type, +} + +/// Generic type metadata +#[derive(Debug)] +pub enum MetaType { + #[doc(hidden)] + Scalar(ScalarMeta), + #[doc(hidden)] + List(ListMeta), + #[doc(hidden)] + Nullable(NullableMeta), + #[doc(hidden)] + Object(ObjectMeta), + #[doc(hidden)] + Enum(EnumMeta), + #[doc(hidden)] + Interface(InterfaceMeta), + #[doc(hidden)] + Union(UnionMeta), + #[doc(hidden)] + InputObject(InputObjectMeta), + #[doc(hidden)] + Placeholder(PlaceholderMeta), +} + +/// Metadata for a field +#[derive(Debug, Clone)] +pub struct Field { + #[doc(hidden)] + pub name: String, + #[doc(hidden)] + pub description: Option, + #[doc(hidden)] + pub arguments: Option>, + #[doc(hidden)] + pub field_type: Type, + #[doc(hidden)] + pub deprecation_reason: Option, +} + +/// Metadata for an argument to a field +#[derive(Debug, Clone)] +pub struct Argument { + #[doc(hidden)] + pub name: String, + #[doc(hidden)] + pub description: Option, + #[doc(hidden)] + pub arg_type: Type, + #[doc(hidden)] + pub default_value: Option, +} + +/// Metadata for a single value in an enum +#[derive(Debug, Clone)] +pub struct EnumValue { + /// The name of the enum value + /// + /// This is the string literal representation of the enum in responses. + pub name: String, + /// The optional description of the enum value. + /// + /// Note: this is not the description of the enum itself; it's the + /// description of this enum _value_. + pub description: Option, + /// The optional deprecation reason + /// + /// If this is `Some`, the field will be considered `isDeprecated`. 
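These metadata structs are normally produced by the macros, but they can also be assembled by hand. A hypothetical sketch of enum-value metadata for the Star Wars `Episode` type, using the builder methods defined further down in this file (the description and deprecation strings are made up for illustration):

```rust
use schema::meta::EnumValue;

fn episode_values() -> Vec<EnumValue> {
    vec![
        EnumValue::new("NEW_HOPE").description("Released in 1977"),
        EnumValue::new("EMPIRE").description("Released in 1980"),
        EnumValue::new("JEDI")
            .description("Released in 1983")
            // A deprecation reason makes introspection report `isDeprecated: true`.
            .deprecated("Shown here only to demonstrate the builder"),
    ]
}
```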
+ pub deprecation_reason: Option, +} + +impl MetaType { + /// Access the name of the type, if applicable + /// + /// Lists, non-null wrappers, and placeholders don't have names. + pub fn name(&self) -> Option<&str> { + match *self { + MetaType::Scalar(ScalarMeta { ref name, .. }) | + MetaType::Object(ObjectMeta { ref name, .. }) | + MetaType::Enum(EnumMeta { ref name, .. }) | + MetaType::Interface(InterfaceMeta { ref name, .. }) | + MetaType::Union(UnionMeta { ref name, .. }) | + MetaType::InputObject(InputObjectMeta { ref name, .. }) => + Some(name), + _ => None, + } + } + + /// Access the description of the type, if applicable + /// + /// Lists, nullable wrappers, and placeholders don't have names. + pub fn description(&self) -> Option<&String> { + match *self { + MetaType::Scalar(ScalarMeta { ref description, .. }) | + MetaType::Object(ObjectMeta { ref description, .. }) | + MetaType::Enum(EnumMeta { ref description, .. }) | + MetaType::Interface(InterfaceMeta { ref description, .. }) | + MetaType::Union(UnionMeta { ref description, .. }) | + MetaType::InputObject(InputObjectMeta { ref description, .. }) => + description.as_ref(), + _ => None, + } + } + + /// Construct a `TypeKind` for a given type + /// + /// # Panics + /// Panics if the type represents a placeholder or nullable type. + pub fn type_kind(&self) -> TypeKind { + match *self { + MetaType::Scalar(_) => TypeKind::Scalar, + MetaType::List(_) => TypeKind::List, + MetaType::Nullable(_) => panic!("Can't take type_kind of nullable meta type"), + MetaType::Object(_) => TypeKind::Object, + MetaType::Enum(_) => TypeKind::Enum, + MetaType::Interface(_) => TypeKind::Interface, + MetaType::Union(_) => TypeKind::Union, + MetaType::InputObject(_) => TypeKind::InputObject, + MetaType::Placeholder(_) => panic!("Can't take type_kind of placeholder meta type"), + } + } + + /// Access a field's meta data given its name + /// + /// Only objects and interfaces have fields. This method always returns `None` for other types. + pub fn field_by_name(&self, name: &str) -> Option<&Field> { + match *self { + MetaType::Object(ObjectMeta { ref fields, .. }) | + MetaType::Interface(InterfaceMeta { ref fields, .. }) => + fields.iter().filter(|f| f.name == name).next(), + _ => None, + } + } + + /// Access an input field's meta data given its name + /// + /// Only input objects have input fields. This method always returns `None` for other types. + pub fn input_field_by_name(&self, name: &str) -> Option<&Argument> { + match *self { + MetaType::InputObject(InputObjectMeta { ref input_fields, .. }) => + input_fields.iter().filter(|f| f.name == name).next(), + _ => None, + } + } + + /// Construct a `Type` literal instance based on the metadata + pub fn as_type(&self) -> Type { + match *self { + MetaType::Scalar(ScalarMeta { ref name, .. }) | + MetaType::Object(ObjectMeta { ref name, .. }) | + MetaType::Enum(EnumMeta { ref name, .. }) | + MetaType::Interface(InterfaceMeta { ref name, .. }) | + MetaType::Union(UnionMeta { ref name, .. }) | + MetaType::InputObject(InputObjectMeta { ref name, .. 
}) => + Type::NonNullNamed(name.to_owned()), + MetaType::List(ListMeta { ref of_type }) => + Type::NonNullList(Box::new(of_type.clone())), + MetaType::Nullable(NullableMeta { ref of_type }) => + match *of_type { + Type::NonNullNamed(ref inner) => Type::Named(inner.to_owned()), + Type::NonNullList(ref inner) => Type::List(inner.clone()), + ref t => t.clone(), + }, + MetaType::Placeholder(PlaceholderMeta { ref of_type }) => of_type.clone(), + } + } + + /// Access the input value parse function, if applicable + /// + /// An input value parse function is a function that takes an `InputValue` instance and returns + /// `true` if it can be parsed as the provided type. + /// + /// Only scalars, enums, and input objects have parse functions. + pub fn input_value_parse_fn(&self) -> Option<&Box bool + Send + Sync>> { + match *self { + MetaType::Scalar(ScalarMeta { ref try_parse_fn, .. }) | + MetaType::Enum(EnumMeta { ref try_parse_fn, .. }) | + MetaType::InputObject(InputObjectMeta { ref try_parse_fn, .. }) => + Some(try_parse_fn), + _ => None, + } + } + + /// Returns true if the type is a composite type + /// + /// Objects, interfaces, and unions are composite. + pub fn is_composite(&self) -> bool { + match *self { + MetaType::Object(_) | + MetaType::Interface(_) | + MetaType::Union(_) => true, + _ => false, + } + } + + /// Returns true if the type can occur in leaf positions in queries + /// + /// Only enums and scalars are leaf types. + pub fn is_leaf(&self) -> bool { + match *self { + MetaType::Enum(_) | + MetaType::Scalar(_) => true, + _ => false, + } + } + + /// Returns true if the type is abstract + /// + /// Only interfaces and unions are abstract types. + pub fn is_abstract(&self) -> bool { + match *self { + MetaType::Interface(_) | + MetaType::Union(_) => true, + _ => false, + } + } + + /// Returns true if the type can be used in input positions, e.g. arguments or variables + /// + /// Only scalars, enums, and input objects are input types. + pub fn is_input(&self) -> bool { + match *self { + MetaType::Scalar(_) | + MetaType::Enum(_) | + MetaType::InputObject(_) => true, + _ => false, + } + } +} + +impl ScalarMeta { + /// Build a new scalar type metadata with the specified name + pub fn new(name: &str) -> ScalarMeta { + ScalarMeta { + name: name.to_owned(), + description: None, + try_parse_fn: Box::new( + |v: &InputValue| ::from(v).is_some()), + } + } + + /// Set the description for the given scalar type + /// + /// If a description already was set prior to calling this method, it will be overwritten. 
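The `as_type` logic above encodes the crate's non-null-by-default convention: named types and lists come out as their non-null variants, and only an explicit `NullableMeta` wrapper strips the marker again. A small sketch of that behavior (crate-internal API, the `Episode` name is chosen for illustration):

```rust
use ast::Type;
use schema::meta::{ListMeta, NullableMeta};

fn non_null_by_default() {
    // A list meta type turns into a non-null list of its element type...
    let list = ListMeta::new(Type::NonNullNamed("Episode".to_owned())).into_meta();
    assert_eq!(
        list.as_type(),
        Type::NonNullList(Box::new(Type::NonNullNamed("Episode".to_owned()))));

    // ...while a nullable wrapper unwraps the outermost non-null marker.
    let nullable = NullableMeta::new(Type::NonNullNamed("Episode".to_owned())).into_meta();
    assert_eq!(nullable.as_type(), Type::Named("Episode".to_owned()));
}
```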
+ pub fn description(mut self, description: &str) -> ScalarMeta { + self.description = Some(description.to_owned()); + self + } + + /// Wrap the scalar in a generic meta type + pub fn into_meta(self) -> MetaType { + MetaType::Scalar(self) + } +} + +impl ListMeta { + /// Build a new list type by wrapping the specified type + pub fn new(of_type: Type) -> ListMeta { + ListMeta { + of_type: of_type, + } + } + + /// Wrap the list in a generic meta type + pub fn into_meta(self) -> MetaType { + MetaType::List(self) + } +} + +impl NullableMeta { + /// Build a new nullable type by wrapping the specified type + pub fn new(of_type: Type) -> NullableMeta { + NullableMeta { + of_type: of_type, + } + } + + /// Wrap the nullable type in a generic meta type + pub fn into_meta(self) -> MetaType { + MetaType::Nullable(self) + } +} + +impl ObjectMeta { + /// Build a new object type with the specified name and fields + pub fn new(name: &str, fields: &[Field]) -> ObjectMeta { + ObjectMeta { + name: name.to_owned(), + description: None, + fields: fields.to_vec(), + interface_names: vec![], + } + } + + /// Set the description for the object + /// + /// If a description was provided prior to calling this method, it will be overwritten. + pub fn description(mut self, description: &str) -> ObjectMeta { + self.description = Some(description.to_owned()); + self + } + + /// Set the interfaces this type implements + /// + /// If a list of interfaces already was provided prior to calling this method, they will be + /// overwritten. + pub fn interfaces(mut self, interfaces: &[Type]) -> ObjectMeta { + self.interface_names = interfaces.iter() + .map(|t| t.innermost_name().to_owned()).collect(); + self + } + + /// Wrap this object type in a generic meta type + pub fn into_meta(self) -> MetaType { + MetaType::Object(self) + } +} + +impl EnumMeta { + /// Build a new enum type with the specified name and possible values + pub fn new(name: &str, values: &[EnumValue]) -> EnumMeta { + EnumMeta { + name: name.to_owned(), + description: None, + values: values.to_vec(), + try_parse_fn: Box::new( + |v: &InputValue| ::from(v).is_some()), + } + } + + /// Set the description of the type + /// + /// If a description was provided prior to calling this method, it will be overwritten + pub fn description(mut self, description: &str) -> EnumMeta { + self.description = Some(description.to_owned()); + self + } + + /// Wrap this enum type in a generic meta type + pub fn into_meta(self) -> MetaType { + MetaType::Enum(self) + } +} + +impl InterfaceMeta { + /// Build a new interface type with the specified name and fields + pub fn new(name: &str, fields: &[Field]) -> InterfaceMeta { + InterfaceMeta { + name: name.to_owned(), + description: None, + fields: fields.to_vec(), + } + } + + /// Set the description of the type + /// + /// If a description was provided prior to calling this method, it will be overwritten. 
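In the same spirit, object metadata is normally built by `graphql_object!` through the registry, but the builder methods above can be used directly. A hand-rolled sketch for the `Human` type from the test schema (field list shortened; constructing `Field` as a struct literal works here because its members are public within the crate):

```rust
use ast::Type;
use schema::meta::{Field, MetaType, ObjectMeta};

fn human_meta() -> MetaType {
    let fields = [
        Field {
            name: "id".to_owned(),
            description: Some("The id of the human".to_owned()),
            arguments: None,
            field_type: Type::NonNullNamed("String".to_owned()),
            deprecation_reason: None,
        },
    ];

    ObjectMeta::new("Human", &fields)
        .description("A humanoid creature in the Star Wars universe.")
        // Only the innermost type name is recorded for each interface.
        .interfaces(&[Type::NonNullNamed("Character".to_owned())])
        .into_meta()
}
```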
+ pub fn description(mut self, description: &str) -> InterfaceMeta { + self.description = Some(description.to_owned()); + self + } + + /// Wrap this interface type in a generic meta type + pub fn into_meta(self) -> MetaType { + MetaType::Interface(self) + } +} + +impl UnionMeta { + /// Build a new union type with the specified name and possible types + pub fn new(name: &str, of_types: &[Type]) -> UnionMeta { + UnionMeta { + name: name.to_owned(), + description: None, + of_type_names: of_types.iter() + .map(|t| t.innermost_name().to_owned()).collect(), + } + } + + /// Set the description of the type + /// + /// If a description was provided prior to calling this method, it will be overwritten. + pub fn description(mut self, description: &str) -> UnionMeta { + self.description = Some(description.to_owned()); + self + } + + /// Wrap this union type in a generic meta type + pub fn into_meta(self) -> MetaType { + MetaType::Union(self) + } +} + +impl InputObjectMeta { + /// Build a new input type with the specified name and input fields + pub fn new(name: &str, input_fields: &[Argument]) -> InputObjectMeta { + InputObjectMeta { + name: name.to_owned(), + description: None, + input_fields: input_fields.to_vec(), + try_parse_fn: Box::new( + |v: &InputValue| ::from(v).is_some()), + } + } + + /// Set the description of the type + /// + /// If a description was provided prior to calling this method, it will be overwritten. + pub fn description(mut self, description: &str) -> InputObjectMeta { + self.description = Some(description.to_owned()); + self + } + + /// Wrap this union type in a generic meta type + pub fn into_meta(self) -> MetaType { + MetaType::InputObject(self) + } +} + +impl Field { + /// Set the description of the field + /// + /// This overwrites the description if any was previously set. + pub fn description(mut self, description: &str) -> Field { + self.description = Some(description.to_owned()); + self + } + + /// Add an argument to the field + /// + /// Arguments are unordered and can't contain duplicates by name. + pub fn argument(mut self, argument: Argument) -> Field { + match self.arguments { + None => { self.arguments = Some(vec![argument]); } + Some(ref mut args) => { args.push(argument); } + }; + + self + } + + /// Set the deprecation reason + /// + /// This overwrites the deprecation reason if any was previously set. + pub fn deprecated(mut self, reason: &str) -> Field { + self.deprecation_reason = Some(reason.to_owned()); + self + } +} + +impl Argument { + #[doc(hidden)] + pub fn new(name: &str, arg_type: Type) -> Argument { + Argument { + name: name.to_owned(), + description: None, + arg_type: arg_type, + default_value: None + } + } + + /// Set the description of the argument + /// + /// This overwrites the description if any was previously set. + pub fn description(mut self, description: &str) -> Argument { + self.description = Some(description.to_owned()); + self + } + + /// Set the default value of the argument + /// + /// This overwrites the description if any was previously set. + pub fn default_value(mut self, default_value: InputValue) -> Argument { + self.default_value = Some(default_value); + self + } +} + +impl EnumValue { + /// Construct a new enum value with the provided name + pub fn new(name: &str) -> EnumValue { + EnumValue { + name: name.to_owned(), + description: None, + deprecation_reason: None, + } + } + + /// Set the description of the enum value + /// + /// This overwrites the description if any was previously set. 
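Field arguments follow the same builder pattern: `Field::argument` appends to the argument list, and `Argument::default_value` records the value substituted when a query omits the argument. A sketch continuing the hypothetical `Human`/`Episode` example (all strings are illustrative):

```rust
use ast::{InputValue, Type};
use schema::meta::{Argument, Field};

fn add_hero_argument(field: Field) -> Field {
    field
        .description("Fetch the hero of the saga, or of one particular episode")
        .argument(
            Argument::new("episode", Type::Named("Episode".to_owned()))
                .description("If omitted, returns the hero of the whole saga")
                // Picked up by Arguments::new when the caller leaves `episode` out.
                .default_value(InputValue::enum_value("NEW_HOPE")))
}
```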
+ pub fn description(mut self, description: &str) -> EnumValue { + self.description = Some(description.to_owned()); + self + } + + /// Set the deprecation reason for the enum value + /// + /// This overwrites the deprecation reason if any was previously set. + pub fn deprecated(mut self, reason: &str) -> EnumValue { + self.deprecation_reason = Some(reason.to_owned()); + self + } +} + +impl fmt::Debug for ScalarMeta { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt.debug_struct("ScalarMeta") + .field("name", &self.name) + .field("description", &self.description) + .finish() + } +} + +impl fmt::Debug for EnumMeta { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt.debug_struct("EnumMeta") + .field("name", &self.name) + .field("description", &self.description) + .field("values", &self.values) + .finish() + } +} + +impl fmt::Debug for InputObjectMeta { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt.debug_struct("InputObjectMeta") + .field("name", &self.name) + .field("description", &self.description) + .field("input_fields", &self.input_fields) + .finish() + } +} diff --git a/src/schema/mod.rs b/src/schema/mod.rs new file mode 100644 index 00000000..3ec3af0d --- /dev/null +++ b/src/schema/mod.rs @@ -0,0 +1,3 @@ +pub mod model; +pub mod schema; +pub mod meta; diff --git a/src/schema/model.rs b/src/schema/model.rs new file mode 100644 index 00000000..2248161c --- /dev/null +++ b/src/schema/model.rs @@ -0,0 +1,340 @@ +use std::collections::HashMap; +use std::marker::PhantomData; +use std::fmt; + +use types::base::{GraphQLType}; +use types::schema::Registry; +use ast::Type; +use schema::meta::{MetaType, ObjectMeta, PlaceholderMeta, UnionMeta, InterfaceMeta, Argument}; + +/// Root query node of a schema +/// +/// This brings the mutatino and query types together, and provides the +/// predefined metadata fields. +pub struct RootNode { + #[doc(hidden)] + pub query_type: QueryT, + #[doc(hidden)] + pub mutation_type: MutationT, + #[doc(hidden)] + pub schema: SchemaType, + phantom_wrapped: PhantomData, +} + +/// Metadata for a schema +pub struct SchemaType { + types: HashMap, + query_type_name: String, + mutation_type_name: Option, + directives: HashMap, +} + +pub enum TypeType<'a> { + Concrete(&'a MetaType), + NonNull(Box>), + List(Box>), +} + +pub struct DirectiveType { + pub name: String, + pub description: Option, + pub locations: Vec, + pub arguments: Vec, +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum DirectiveLocation { + Query, + Mutation, + Field, + FragmentDefinition, + FragmentSpread, + InlineFragment, +} + +impl RootNode + where QueryT: GraphQLType, + MutationT: GraphQLType, +{ + /// Construct a new root node from query and mutation nodes + /// + /// If the schema should not support mutations, you can pass in `()` to + /// remove the mutation type from the schema. 
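A minimal sketch of the mutation-less case described above, written in the same style as the integration tests later in this commit: passing `()` as the mutation object makes `SchemaType::new` (below) drop the mutation type from the schema, while queries keep working.

```rust
use std::collections::HashMap;

use schema::model::RootNode;
use tests::model::Database;

#[test]
fn schema_without_mutations() {
    let database = Database::new();
    // `()` stands in for "no mutations"; its `__Unit` meta type name is
    // filtered out when the schema metadata is built.
    let schema = RootNode::new(&database, ());

    let result = ::execute("{ hero { name } }", None, &schema, &HashMap::new(), &database);
    assert!(result.is_ok());
}
```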
+ pub fn new(query_obj: QueryT, mutation_obj: MutationT) -> RootNode { + RootNode { + query_type: query_obj, + mutation_type: mutation_obj, + schema: SchemaType::new::(), + phantom_wrapped: PhantomData, + } + } +} + +impl SchemaType { + pub fn new() -> SchemaType + where QueryT: GraphQLType, + MutationT: GraphQLType, + { + let mut types = HashMap::new(); + let mut directives = HashMap::new(); + let query_type_name: String; + let mutation_type_name: String; + + { + let mut registry = Registry::::new(types); + query_type_name = registry.get_type::().innermost_name().to_owned(); + mutation_type_name = registry.get_type::().innermost_name().to_owned(); + types = registry.types; + } + + { + let mut registry = Registry::::new(types); + registry.get_type::(); + directives.insert( + "skip".to_owned(), + DirectiveType::new_skip(&mut registry)); + directives.insert( + "include".to_owned(), + DirectiveType::new_include(&mut registry)); + + let mut meta_fields = vec![ + registry.field::("__schema"), + registry.field::("__type") + .argument(registry.arg::("name")), + ]; + + if let Some(root_type) = registry.types.get_mut(&query_type_name) { + if let &mut MetaType::Object(ObjectMeta { ref mut fields, .. }) = root_type { + fields.append(&mut meta_fields); + } + else { + panic!("Root type is not an object"); + } + } + else { + panic!("Root type not found"); + } + + types = registry.types; + } + + for meta_type in types.values() { + if let MetaType::Placeholder(PlaceholderMeta { ref of_type }) = *meta_type { + panic!("Type {:?} is still a placeholder type", of_type); + } + } + + SchemaType { + types: types, + query_type_name: query_type_name, + mutation_type_name: if &mutation_type_name != "__Unit" { Some(mutation_type_name) } else { None }, + directives: directives, + } + } + + pub fn add_directive(&mut self, directive: DirectiveType) { + self.directives.insert(directive.name.clone(), directive); + } + + pub fn type_by_name(&self, name: &str) -> Option { + self.types.get(name).map(|t| TypeType::Concrete(t)) + } + + pub fn concrete_type_by_name(&self, name: &str) -> Option<&MetaType> { + self.types.get(name) + } + + pub fn query_type(&self) -> TypeType { + TypeType::Concrete( + self.types.get(&self.query_type_name) + .expect("Query type does not exist in schema")) + } + + pub fn concrete_query_type(&self) -> &MetaType { + self.types.get(&self.query_type_name) + .expect("Query type does not exist in schema") + } + + pub fn mutation_type(&self) -> Option { + if let Some(ref mutation_type_name) = self.mutation_type_name { + Some(self.type_by_name(mutation_type_name) + .expect("Mutation type does not exist in schema")) + } + else { + None + } + } + + pub fn concrete_mutation_type(&self) -> Option<&MetaType> { + self.mutation_type_name.as_ref().map(|name| + self.concrete_type_by_name(name) + .expect("Mutation type does not exist in schema")) + } + + pub fn type_list(&self) -> Vec { + self.types.values().map(|t| TypeType::Concrete(t)).collect() + } + + pub fn concrete_type_list(&self) -> Vec<&MetaType> { + self.types.values().collect() + } + + pub fn make_type(&self, t: &Type) -> TypeType { + match *t { + Type::NonNullNamed(ref n) => + TypeType::NonNull(Box::new( + self.type_by_name(n).expect("Type not found in schema"))), + Type::NonNullList(ref inner) => + TypeType::NonNull(Box::new( + TypeType::List(Box::new(self.make_type(inner))))), + Type::Named(ref n) => self.type_by_name(n).expect("Type not found in schema"), + Type::List(ref inner) => + TypeType::List(Box::new(self.make_type(inner))), + } + } + + pub fn 
directive_list(&self) -> Vec<&DirectiveType> { + self.directives.values().collect() + } + + pub fn directive_by_name(&self, name: &str) -> Option<&DirectiveType> { + self.directives.get(name) + } + + pub fn type_overlap(&self, t1: &MetaType, t2: &MetaType) -> bool { + if (t1 as *const MetaType) == (t2 as *const MetaType) { + return true; + } + + match (t1.is_abstract(), t2.is_abstract()) { + (true, true) => self.possible_types(t1).iter().any(|t| self.is_possible_type(t2, t)), + (true, false) => self.is_possible_type(t1, t2), + (false, true) => self.is_possible_type(t2, t1), + (false, false) => false, + } + } + + pub fn possible_types(&self, t: &MetaType) -> Vec<&MetaType> { + match *t { + MetaType::Union(UnionMeta { ref of_type_names, .. }) => + of_type_names + .iter() + .flat_map(|t| self.concrete_type_by_name(t)) + .collect(), + MetaType::Interface(InterfaceMeta { ref name, .. }) => + self.concrete_type_list() + .into_iter() + .filter(|t| match **t { + MetaType::Object(ObjectMeta { ref interface_names, .. }) => + interface_names.iter().any(|iname| iname == name), + _ => false + }) + .collect(), + _ => panic!("Can't retrieve possible types from non-abstract meta type") + } + } + + pub fn is_possible_type(&self, abstract_type: &MetaType, possible_type: &MetaType) -> bool { + self.possible_types(abstract_type) + .into_iter() + .any(|t| (t as *const MetaType) == (possible_type as *const MetaType)) + } + + pub fn is_subtype(&self, sub_type: &Type, super_type: &Type) -> bool { + use ast::Type::*; + + if super_type == sub_type { + return true; + } + + match (super_type, sub_type) { + (&NonNullNamed(ref super_name), &NonNullNamed(ref sub_name)) | + (&Named(ref super_name), &Named(ref sub_name)) | + (&Named(ref super_name), &NonNullNamed(ref sub_name)) => + self.is_named_subtype(sub_name, super_name), + (&NonNullList(ref super_inner), &NonNullList(ref sub_inner)) | + (&List(ref super_inner), &List(ref sub_inner)) | + (&List(ref super_inner), &NonNullList(ref sub_inner)) => + self.is_subtype(sub_inner, super_inner), + _ => false + } + } + + pub fn is_named_subtype(&self, sub_type_name: &str, super_type_name: &str) -> bool { + if sub_type_name == super_type_name { + true + } + else if let (Some(sub_type), Some(super_type)) + = (self.concrete_type_by_name(sub_type_name), self.concrete_type_by_name(super_type_name)) + { + super_type.is_abstract() && self.is_possible_type(super_type, sub_type) + } + else { + false + } + } +} + +impl<'a> TypeType<'a> { + pub fn to_concrete(&self) -> Option<&'a MetaType> { + match *self { + TypeType::Concrete(t) => Some(t), + _ => None + } + } +} + +impl DirectiveType { + pub fn new(name: &str, locations: &[DirectiveLocation], arguments: &[Argument]) -> DirectiveType { + DirectiveType { + name: name.to_owned(), + description: None, + locations: locations.to_vec(), + arguments: arguments.to_vec(), + } + } + + fn new_skip(registry: &mut Registry) -> DirectiveType { + Self::new( + "skip", + &[ + DirectiveLocation::Field, + DirectiveLocation::FragmentSpread, + DirectiveLocation::InlineFragment, + ], + &[ + registry.arg::("if"), + ]) + } + + fn new_include(registry: &mut Registry) -> DirectiveType { + Self::new( + "include", + &[ + DirectiveLocation::Field, + DirectiveLocation::FragmentSpread, + DirectiveLocation::InlineFragment, + ], + &[ + registry.arg::("if"), + ]) + } + + pub fn description(mut self, description: &str) -> DirectiveType { + self.description = Some(description.to_owned()); + self + } +} + +impl fmt::Display for DirectiveLocation { + fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result { + f.write_str(match *self { + DirectiveLocation::Query => "query", + DirectiveLocation::Mutation => "mutation", + DirectiveLocation::Field => "field", + DirectiveLocation::FragmentDefinition => "fragment definition", + DirectiveLocation::FragmentSpread => "fragment spread", + DirectiveLocation::InlineFragment => "inline fragment", + }) + } +} diff --git a/src/schema/schema.rs b/src/schema/schema.rs new file mode 100644 index 00000000..2a5e0160 --- /dev/null +++ b/src/schema/schema.rs @@ -0,0 +1,251 @@ +use rustc_serialize::json::ToJson; + +use types::base::{GraphQLType, Arguments, TypeKind}; +use types::schema::{Executor, Registry, FieldResult, ExecutionResult}; + +use schema::meta::{MetaType, ObjectMeta, EnumMeta, InputObjectMeta, UnionMeta, InterfaceMeta, + Field, Argument, EnumValue}; +use schema::model::{RootNode, SchemaType, TypeType, DirectiveType, DirectiveLocation}; + +impl GraphQLType for RootNode + where QueryT: GraphQLType, + MutationT: GraphQLType +{ + fn name() -> Option<&'static str> { + QueryT::name() + } + + fn meta(registry: &mut Registry) -> MetaType { + QueryT::meta(registry) + } + + fn resolve_field(&self, field: &str, args: &Arguments, executor: &mut Executor) -> ExecutionResult { + match field { + "__schema" => executor.replaced_context(&self.schema).resolve(&self.schema), + "__type" => { + let type_name: String = args.get("name").unwrap(); + executor.replaced_context(&self.schema).resolve(&self.schema.type_by_name(&type_name)) + }, + _=> self.query_type.resolve_field(field, args, executor), + } + } +} + +graphql_object!(SchemaType: SchemaType as "__Schema" |&self| { + field types() -> FieldResult> { + Ok(self.type_list()) + } + + field query_type() -> FieldResult { + Ok(self.query_type()) + } + + field mutation_type() -> FieldResult> { + Ok(self.mutation_type()) + } + + field directives() -> FieldResult> { + Ok(self.directive_list()) + } +}); + +graphql_object!(<'a> TypeType<'a>: SchemaType as "__Type" |&self| { + field name() -> FieldResult> { + Ok(match *self { + TypeType::Concrete(t) => t.name(), + _ => None, + }) + } + + field description() -> FieldResult> { + Ok(match *self { + TypeType::Concrete(t) => t.description(), + _ => None, + }) + } + + field kind() -> FieldResult { + Ok(match *self { + TypeType::Concrete(t) => t.type_kind(), + TypeType::List(_) => TypeKind::List, + TypeType::NonNull(_) => TypeKind::NonNull, + }) + } + + field fields(include_deprecated = false: bool) -> FieldResult>> { + Ok(match *self { + TypeType::Concrete(&MetaType::Interface(InterfaceMeta { ref fields, .. })) | + TypeType::Concrete(&MetaType::Object(ObjectMeta { ref fields, .. })) => + Some(fields + .iter() + .filter(|f| include_deprecated || f.deprecation_reason.is_none()) + .collect()), + _ => None, + }) + } + + field of_type() -> FieldResult> { + Ok(match *self { + TypeType::Concrete(_) => None, + TypeType::List(ref l) | TypeType::NonNull(ref l) => Some(l), + }) + } + + field input_fields() -> FieldResult>> { + Ok(match *self { + TypeType::Concrete(&MetaType::InputObject(InputObjectMeta { ref input_fields, .. })) => + Some(input_fields), + _ => None, + }) + } + + field interfaces(&mut executor) -> FieldResult>> { + Ok(match *self { + TypeType::Concrete(&MetaType::Object(ObjectMeta { ref interface_names, .. 
})) => { + let schema = executor.context(); + Some(interface_names + .iter() + .filter_map(|n| schema.type_by_name(n)) + .collect()) + } + _ => None, + }) + } + + field possible_types(&mut executor) -> FieldResult>> { + let schema = executor.context(); + Ok(match *self { + TypeType::Concrete(&MetaType::Union(UnionMeta { ref of_type_names, .. })) => { + Some(of_type_names + .iter() + .filter_map(|tn| schema.type_by_name(tn)) + .collect()) + } + TypeType::Concrete(&MetaType::Interface(InterfaceMeta { name: ref iface_name, .. })) => { + Some(schema.concrete_type_list() + .iter() + .filter_map(|&ct| + if let &MetaType::Object(ObjectMeta { ref name, ref interface_names, .. }) = ct { + if interface_names.contains(iface_name) { + schema.type_by_name(name) + } else { None } + } else { None } + ) + .collect()) + } + _ => None, + }) + } + + field enum_values(include_deprecated = false: bool) -> FieldResult>> { + Ok(match *self { + TypeType::Concrete(&MetaType::Enum(EnumMeta { ref values, .. })) => + Some(values + .iter() + .filter(|f| include_deprecated || f.deprecation_reason.is_none()) + .collect()), + _ => None, + }) + } +}); + +graphql_object!(Field: SchemaType as "__Field" |&self| { + field name() -> FieldResult<&String> { + Ok(&self.name) + } + + field description() -> FieldResult<&Option> { + Ok(&self.description) + } + + field args() -> FieldResult> { + Ok(self.arguments.as_ref().map_or_else(|| Vec::new(), |v| v.iter().collect())) + } + + field type(&mut executor) -> FieldResult { + Ok(executor.context().make_type(&self.field_type)) + } + + field is_deprecated() -> FieldResult { + Ok(self.deprecation_reason.is_some()) + } + + field deprecation_reason() -> FieldResult<&Option> { + Ok(&self.deprecation_reason) + } +}); + +graphql_object!(Argument: SchemaType as "__InputValue" |&self| { + field name() -> FieldResult<&String> { + Ok(&self.name) + } + + field description() -> FieldResult<&Option> { + Ok(&self.description) + } + + field type(&mut executor) -> FieldResult { + Ok(executor.context().make_type(&self.arg_type)) + } + + field default_value() -> FieldResult> { + Ok(self.default_value.as_ref().map(|v| v.to_json().to_string())) + } +}); + +graphql_object!(EnumValue: SchemaType as "__EnumValue" |&self| { + field name() -> FieldResult<&String> { + Ok(&self.name) + } + + field description() -> FieldResult<&Option> { + Ok(&self.description) + } + + field is_deprecated() -> FieldResult { + Ok(self.deprecation_reason.is_some()) + } + + field deprecation_reason() -> FieldResult<&Option> { + Ok(&self.deprecation_reason) + } +}); + +graphql_enum!(TypeKind as "__TypeKind" { + TypeKind::Scalar => "SCALAR", + TypeKind::Object => "OBJECT", + TypeKind::Interface => "INTERFACE", + TypeKind::Union => "UNION", + TypeKind::Enum => "ENUM", + TypeKind::InputObject => "INPUT_OBJECT", + TypeKind::List => "LIST", + TypeKind::NonNull => "NON_NULL", +}); + + +graphql_object!(DirectiveType: SchemaType as "__Directive" |&self| { + field name() -> FieldResult<&String> { + Ok(&self.name) + } + + field description() -> FieldResult<&Option> { + Ok(&self.description) + } + + field locations() -> FieldResult<&Vec> { + Ok(&self.locations) + } + + field args() -> FieldResult<&Vec> { + Ok(&self.arguments) + } +}); + +graphql_enum!(DirectiveLocation as "__DirectiveLocation" { + DirectiveLocation::Query => "QUERY", + DirectiveLocation::Mutation => "MUTATION", + DirectiveLocation::Field => "FIELD", + DirectiveLocation::FragmentDefinition => "FRAGMENT_DEFINITION", + DirectiveLocation::FragmentSpread => "FRAGMENT_SPREAD", + 
DirectiveLocation::InlineFragment => "INLINE_FRAGMENT", +}); diff --git a/src/tests/bench.rs b/src/tests/bench.rs new file mode 100644 index 00000000..27c39031 --- /dev/null +++ b/src/tests/bench.rs @@ -0,0 +1,125 @@ +use test::Bencher; + +use std::collections::{HashMap}; + +use schema::model::RootNode; +use tests::model::Database; + +#[bench] +fn query_type_name(b: &mut Bencher) { + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + let doc = r#" + query IntrospectionQueryTypeQuery { + __schema { + queryType { + name + } + } + }"#; + + b.iter(|| ::execute(doc, None, &schema, &HashMap::new(), &database)); +} + +#[bench] +fn introspection_query(b: &mut Bencher) { + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + let doc = r#" + query IntrospectionQuery { + __schema { + queryType { name } + mutationType { name } + subscriptionType { name } + types { + ...FullType + } + directives { + name + description + locations + args { + ...InputValue + } + } + } + } + + fragment FullType on __Type { + kind + name + description + fields(includeDeprecated: true) { + name + description + args { + ...InputValue + } + type { + ...TypeRef + } + isDeprecated + deprecationReason + } + inputFields { + ...InputValue + } + interfaces { + ...TypeRef + } + enumValues(includeDeprecated: true) { + name + description + isDeprecated + deprecationReason + } + possibleTypes { + ...TypeRef + } + } + + fragment InputValue on __InputValue { + name + description + type { ...TypeRef } + defaultValue + } + + fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } + } + } + } + } +"#; + + b.iter(|| ::execute(doc, None, &schema, &HashMap::new(), &database)); +} diff --git a/src/tests/introspection_tests.rs b/src/tests/introspection_tests.rs new file mode 100644 index 00000000..3ffa6326 --- /dev/null +++ b/src/tests/introspection_tests.rs @@ -0,0 +1,166 @@ +use std::collections::{HashMap, HashSet}; + +use value::Value; +use schema::model::RootNode; +use tests::model::Database; + +#[test] +fn test_query_type_name() { + let doc = r#" + query IntrospectionQueryTypeQuery { + __schema { + queryType { + name + } + } + }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("__schema", Value::object(vec![ + ("queryType", Value::object(vec![ + ("name", Value::string("Query")), + ].into_iter().collect())), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_specific_type_name() { + let doc = r#" + query IntrospectionQueryTypeQuery { + __type(name: "Droid") { + name + } + }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("__type", Value::object(vec![ + ("name", Value::string("Droid")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_specific_object_type_name_and_kind() { + let doc = r#" + query IntrospectionDroidKindQuery { + __type(name: "Droid") { + name + kind + } + } + "#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + 
Ok((Value::object(vec![ + ("__type", Value::object(vec![ + ("name", Value::string("Droid")), + ("kind", Value::string("OBJECT")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_specific_interface_type_name_and_kind() { + let doc = r#" + query IntrospectionDroidKindQuery { + __type(name: "Character") { + name + kind + } + } + "#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("__type", Value::object(vec![ + ("name", Value::string("Character")), + ("kind", Value::string("INTERFACE")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_documentation() { + let doc = r#" + query IntrospectionDroidDescriptionQuery { + __type(name: "Droid") { + name + description + } + } + "#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok(( + Value::object(vec![ + ("__type", Value::object(vec![ + ("name", Value::string("Droid")), + ("description", Value::string("A mechanical creature in the Star Wars universe.")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_possible_types() { + let doc = r#" + query IntrospectionDroidDescriptionQuery { + __type(name: "Character") { + possibleTypes { + name + } + } + } + "#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + let result = ::execute(doc, None, &schema, &HashMap::new(), &database); + + println!("Result: {:#?}", result); + + let (result, errors) = result.ok().expect("Query returned error"); + + assert_eq!(errors, vec![]); + + let possible_types = result + .as_object_value().expect("execution result not an object") + .get("__type").expect("'__type' not present in result") + .as_object_value().expect("'__type' not an object") + .get("possibleTypes").expect("'possibleTypes' not present in '__type'") + .as_list_value().expect("'possibleTypes' not a list") + .iter().map(|t| t + .as_object_value().expect("possible type not an object") + .get("name").expect("'name' not present in type") + .as_string_value().expect("'name' not a string")) + .collect::>(); + + assert_eq!( + possible_types, + vec![ + "Human", + "Droid", + ].into_iter().collect()); +} diff --git a/src/tests/mod.rs b/src/tests/mod.rs new file mode 100644 index 00000000..6ffadc36 --- /dev/null +++ b/src/tests/mod.rs @@ -0,0 +1,7 @@ +pub mod model; +mod schema; +pub mod query_tests; +pub mod introspection_tests; + +#[cfg(feature="nightly")] +pub mod bench; diff --git a/src/tests/model.rs b/src/tests/model.rs new file mode 100644 index 00000000..bfde3b40 --- /dev/null +++ b/src/tests/model.rs @@ -0,0 +1,223 @@ +use std::collections::HashMap; + +#[derive(Copy, Clone, Eq, PartialEq, Debug)] +pub enum Episode { + NewHope, + Empire, + Jedi, +} + +pub trait Character { + fn id(&self) -> &str; + fn name(&self) -> &str; + fn friend_ids(&self) -> &[String]; + fn appears_in(&self) -> &[Episode]; + fn secret_backstory(&self) -> &Option; + fn as_character(&self) -> &Character; +} + +pub trait Human: Character { + fn home_planet(&self) -> &Option; +} + +pub trait Droid: Character { + fn primary_function(&self) -> &Option; +} + +struct HumanData { + id: String, + name: String, + friend_ids: Vec, + appears_in: Vec, + secret_backstory: Option, + home_planet: Option, +} + +struct DroidData { + id: String, + name: 
String, + friend_ids: Vec, + appears_in: Vec, + secret_backstory: Option, + primary_function: Option, +} + +impl Character for HumanData { + fn id(&self) -> &str { &self.id } + fn name(&self) -> &str { &self.name } + fn friend_ids(&self) -> &[String] { &self.friend_ids } + fn appears_in(&self) -> &[Episode] { &self.appears_in } + fn secret_backstory(&self) -> &Option { &self.secret_backstory } + fn as_character(&self) -> &Character { self } +} + +impl Human for HumanData { + fn home_planet(&self) -> &Option { &self.home_planet } +} + +impl Character for DroidData { + fn id(&self) -> &str { &self.id } + fn name(&self) -> &str { &self.name } + fn friend_ids(&self) -> &[String] { &self.friend_ids } + fn appears_in(&self) -> &[Episode] { &self.appears_in } + fn secret_backstory(&self) -> &Option { &self.secret_backstory } + fn as_character(&self) -> &Character { self } +} + +impl Droid for DroidData { + fn primary_function(&self) -> &Option { &self.primary_function } +} + +pub struct Database { + humans: HashMap, + droids: HashMap, +} + +impl HumanData { + pub fn new( + id: &str, + name: &str, + friend_ids: &[&str], + appears_in: &[Episode], + secret_backstory: Option<&str>, + home_planet: Option<&str>) -> HumanData + { + HumanData { + id: id.to_owned(), + name: name.to_owned(), + friend_ids: friend_ids.to_owned().into_iter().map(|f| f.to_owned()).collect(), + appears_in: appears_in.iter().cloned().collect(), + secret_backstory: secret_backstory.map(|b| b.to_owned()), + home_planet: home_planet.map(|p| p.to_owned()), + } + } +} + +impl DroidData { + pub fn new( + id: &str, + name: &str, + friend_ids: &[&str], + appears_in: &[Episode], + secret_backstory: Option<&str>, + primary_function: Option<&str>) -> DroidData + { + DroidData { + id: id.to_owned(), + name: name.to_owned(), + friend_ids: friend_ids.to_owned().into_iter().map(|f| f.to_owned()).collect(), + appears_in: appears_in.iter().cloned().collect(), + secret_backstory: secret_backstory.map(|b| b.to_owned()), + primary_function: primary_function.map(|p| p.to_owned()), + } + } +} + +impl Database { + pub fn new() -> Database { + let mut humans = HashMap::new(); + let mut droids = HashMap::new(); + + humans.insert("1000".to_owned(), HumanData::new( + "1000", + "Luke Skywalker", + &["1002", "1003", "2000", "2001"], + &[Episode::NewHope, Episode::Empire, Episode::Jedi], + None, + Some("Tatooine"), + )); + + humans.insert("1001".to_owned(), HumanData::new( + "1001", + "Darth Vader", + &["1004"], + &[Episode::NewHope, Episode::Empire, Episode::Jedi], + None, + Some("Tatooine"), + )); + + humans.insert("1002".to_owned(), HumanData::new( + "1002", + "Han Solo", + &["1000", "1003", "2001"], + &[Episode::NewHope, Episode::Empire, Episode::Jedi], + None, + None, + )); + + humans.insert("1003".to_owned(), HumanData::new( + "1003", + "Leia Organa", + &["1000", "1002", "2000", "2001"], + &[Episode::NewHope, Episode::Empire, Episode::Jedi], + None, + Some("Alderaan"), + )); + + humans.insert("1004".to_owned(), HumanData::new( + "1004", + "Wilhuff Tarkin", + &["1001"], + &[Episode::NewHope], + None, + None, + )); + + droids.insert("2000".to_owned(), DroidData::new( + "2000", + "C-3PO", + &["1000", "1002", "1003", "2001"], + &[Episode::NewHope, Episode::Empire, Episode::Jedi], + None, + Some("Protocol"), + )); + + droids.insert("2001".to_owned(), DroidData::new( + "2001", + "R2-D2", + &["1000", "1002", "1003"], + &[Episode::NewHope, Episode::Empire, Episode::Jedi], + None, + Some("Astromech"), + )); + + Database { + humans: humans, + droids: droids, + } 
+ } + + pub fn get_hero(&self, episode: Option) -> &Character { + if episode == Some(Episode::Empire) { + self.get_human("1000").unwrap().as_character() + } else { + self.get_droid("2001").unwrap().as_character() + } + } + + pub fn get_human(&self, id: &str) -> Option<&Human> { + self.humans.get(id).map(|h| h as &Human) + } + + pub fn get_droid(&self, id: &str) -> Option<&Droid> { + self.droids.get(id).map(|d| d as &Droid) + } + + pub fn get_character(&self, id: &str) -> Option<&Character> { + if let Some(h) = self.humans.get(id) { + Some(h) + } + else if let Some(d) = self.droids.get(id) { + Some(d) + } + else { + None + } + } + + pub fn get_friends(&self, c: &Character) -> Vec<&Character> { + c.friend_ids().iter() + .flat_map(|id| self.get_character(id)) + .collect() + } +} diff --git a/src/tests/query_tests.rs b/src/tests/query_tests.rs new file mode 100644 index 00000000..3f128049 --- /dev/null +++ b/src/tests/query_tests.rs @@ -0,0 +1,364 @@ +use std::collections::HashMap; + +use ast::InputValue; +use value::Value; +use schema::model::RootNode; +use tests::model::Database; + +#[test] +fn test_hero_name() { + let doc = r#" + { + hero { + name + } + }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("hero", Value::object(vec![ + ("name", Value::string("R2-D2")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_hero_name_and_friends() { + let doc = r#" + { + hero { + id + name + friends { + name + } + } + }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("hero", Value::object(vec![ + ("id", Value::string("2001")), + ("name", Value::string("R2-D2")), + ("friends", Value::list(vec![ + Value::object(vec![ + ("name", Value::string("Luke Skywalker")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("Han Solo")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("Leia Organa")), + ].into_iter().collect()), + ])), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_hero_name_and_friends_and_friends_of_friends() { + let doc = r#" + { + hero { + id + name + friends { + name + appearsIn + friends { + name + } + } + } + }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("hero", Value::object(vec![ + ("id", Value::string("2001")), + ("name", Value::string("R2-D2")), + ("friends", Value::list(vec![ + Value::object(vec![ + ("name", Value::string("Luke Skywalker")), + ("appearsIn", Value::list(vec![ + Value::string("NEW_HOPE"), + Value::string("EMPIRE"), + Value::string("JEDI"), + ])), + ("friends", Value::list(vec![ + Value::object(vec![ + ("name", Value::string("Han Solo")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("Leia Organa")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("C-3PO")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("R2-D2")), + ].into_iter().collect()), + ])), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("Han Solo")), + ("appearsIn", Value::list(vec![ + Value::string("NEW_HOPE"), + Value::string("EMPIRE"), + 
Value::string("JEDI"), + ])), + ("friends", Value::list(vec![ + Value::object(vec![ + ("name", Value::string("Luke Skywalker")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("Leia Organa")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("R2-D2")), + ].into_iter().collect()), + ])), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("Leia Organa")), + ("appearsIn", Value::list(vec![ + Value::string("NEW_HOPE"), + Value::string("EMPIRE"), + Value::string("JEDI"), + ])), + ("friends", Value::list(vec![ + Value::object(vec![ + ("name", Value::string("Luke Skywalker")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("Han Solo")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("C-3PO")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("R2-D2")), + ].into_iter().collect()), + ])), + ].into_iter().collect()), + ])), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_query_name() { + let doc = r#"{ human(id: "1000") { name } }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("human", Value::object(vec![ + ("name", Value::string("Luke Skywalker")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_query_alias_single() { + let doc = r#"{ luke: human(id: "1000") { name } }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("luke", Value::object(vec![ + ("name", Value::string("Luke Skywalker")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_query_alias_multiple() { + let doc = r#" + { + luke: human(id: "1000") { name } + leia: human(id: "1003") { name } + }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("luke", Value::object(vec![ + ("name", Value::string("Luke Skywalker")), + ].into_iter().collect())), + ("leia", Value::object(vec![ + ("name", Value::string("Leia Organa")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_query_alias_multiple_with_fragment() { + let doc = r#" + query UseFragment { + luke: human(id: "1000") { ...HumanFragment } + leia: human(id: "1003") { ...HumanFragment } + } + + fragment HumanFragment on Human { + name + homePlanet + }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("luke", Value::object(vec![ + ("name", Value::string("Luke Skywalker")), + ("homePlanet", Value::string("Tatooine")), + ].into_iter().collect())), + ("leia", Value::object(vec![ + ("name", Value::string("Leia Organa")), + ("homePlanet", Value::string("Alderaan")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_query_name_variable() { + let doc = r#"query FetchSomeIDQuery($someId: String!) 
{ human(id: $someId) { name } }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + let vars = vec![ + ("someId".to_owned(), InputValue::string("1000")), + ].into_iter().collect(); + + assert_eq!( + ::execute(doc, None, &schema, &vars, &database), + Ok((Value::object(vec![ + ("human", Value::object(vec![ + ("name", Value::string("Luke Skywalker")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_query_name_invalid_variable() { + let doc = r#"query FetchSomeIDQuery($someId: String!) { human(id: $someId) { name } }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + let vars = vec![ + ("someId".to_owned(), InputValue::string("some invalid id")), + ].into_iter().collect(); + + assert_eq!( + ::execute(doc, None, &schema, &vars, &database), + Ok((Value::object(vec![ + ("human", Value::null()), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_query_friends_names() { + let doc = r#"{ human(id: "1000") { friends { name } } }"#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("human", Value::object(vec![ + ("friends", Value::list(vec![ + Value::object(vec![ + ("name", Value::string("Han Solo")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("Leia Organa")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("C-3PO")), + ].into_iter().collect()), + Value::object(vec![ + ("name", Value::string("R2-D2")), + ].into_iter().collect()), + ])), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_query_inline_fragments_droid() { + let doc = r#" + query InlineFragments { + hero { + name + __typename + + ...on Droid { + primaryFunction + } + } + } + "#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("hero", Value::object(vec![ + ("__typename", Value::string("Droid")), + ("name", Value::string("R2-D2")), + ("primaryFunction", Value::string("Astromech")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} + +#[test] +fn test_query_inline_fragments_human() { + let doc = r#" + query InlineFragments { + hero(episode: EMPIRE) { + name + __typename + } + } + "#; + let database = Database::new(); + let schema = RootNode::new(&database, ()); + + assert_eq!( + ::execute(doc, None, &schema, &HashMap::new(), &database), + Ok((Value::object(vec![ + ("hero", Value::object(vec![ + ("__typename", Value::string("Human")), + ("name", Value::string("Luke Skywalker")), + ].into_iter().collect())), + ].into_iter().collect()), + vec![]))); +} diff --git a/src/tests/schema.rs b/src/tests/schema.rs new file mode 100644 index 00000000..aa5c3a2b --- /dev/null +++ b/src/tests/schema.rs @@ -0,0 +1,105 @@ +use types::schema::FieldResult; +use tests::model::{Character, Human, Droid, Database, Episode}; + +graphql_enum!(Episode { + Episode::NewHope => "NEW_HOPE", + Episode::Empire => "EMPIRE", + Episode::Jedi => "JEDI", +}); + +graphql_interface!(<'a> &'a Character: Database as "Character" |&self| { + description: "A character in the Star Wars Trilogy" + + field id() -> FieldResult<&str> as "The id of the character" { + Ok(self.id()) + } + + field name() -> FieldResult> as "The name of the character" { + Ok(Some(self.name())) + } 
+ + field friends(&mut executor) -> FieldResult> + as "The friends of the character" { + Ok(executor.context().get_friends(self.as_character())) + } + + field appears_in() -> FieldResult<&[Episode]> as "Which movies they appear in" { + Ok(self.appears_in()) + } + + instance_resolvers: |&context| [ + context.get_human(&self.id()), + context.get_droid(&self.id()), + ] +}); + +graphql_object!(<'a> &'a Human: Database as "Human" |&self| { + description: "A humanoid creature in the Star Wars universe." + + interfaces: [&Character] + + field id() -> FieldResult<&str> as "The id of the human"{ + Ok(self.id()) + } + + field name() -> FieldResult> as "The name of the human" { + Ok(Some(self.name())) + } + + field friends(&mut executor) -> FieldResult> + as "The friends of the human" { + Ok(executor.context().get_friends(self.as_character())) + } + + field home_planet() -> FieldResult<&Option> as "The home planet of the human" { + Ok(self.home_planet()) + } +}); + +graphql_object!(<'a> &'a Droid: Database as "Droid" |&self| { + description: "A mechanical creature in the Star Wars universe." + + interfaces: [&Character] + + field id() -> FieldResult<&str> as "The id of the droid" { + Ok(self.id()) + } + + field name() -> FieldResult> as "The name of the droid" { + Ok(Some(self.name())) + } + + field friends(&mut executor) -> FieldResult> + as "The friends of the droid" { + Ok(executor.context().get_friends(self.as_character())) + } + + field primary_function() -> FieldResult<&Option> as "The primary function of the droid" { + Ok(self.primary_function()) + } +}); + + +graphql_object!(Database: Database as "Query" |&self| { + description: "The root query object of the schema" + + field human( + id: String as "id of the human" + ) -> FieldResult> { + Ok(self.get_human(&id)) + } + + field droid( + id: String as "id of the droid" + ) -> FieldResult> { + Ok(self.get_droid(&id)) + } + + field hero( + episode: Option as + "If omitted, returns the hero of the whole saga. If provided, returns \ + the hero of that particular episode" + ) -> FieldResult> { + Ok(Some(self.get_hero(episode).as_character())) + } +}); diff --git a/src/types/base.rs b/src/types/base.rs new file mode 100644 index 00000000..79e4a948 --- /dev/null +++ b/src/types/base.rs @@ -0,0 +1,404 @@ +use std::collections::HashMap; + +use ast::{InputValue, Selection, Directive, FromInputValue}; +use value::Value; + +use schema::meta::{Argument, MetaType}; +use types::schema::{Executor, Registry, ExecutionResult}; +use parser::Spanning; + +/// GraphQL type kind +/// +/// The GraphQL specification defines a number of type kinds - the meta type +/// of a type. +#[derive(Clone, Eq, PartialEq, Debug)] +pub enum TypeKind { + /// ## Scalar types + /// + /// Scalar types appear as the leaf nodes of GraphQL queries. Strings, + /// numbers, and booleans are the built in types, and while it's possible + /// to define your own, it's relatively uncommon. + Scalar, + + /// ## Object types + /// + /// The most common type to be implemented by users. Objects have fields + /// and can implement interfaces. + Object, + + /// ## Interface types + /// + /// Interface types are used to represent overlapping fields between + /// multiple types, and can be queried for their concrete type. + Interface, + + /// ## Union types + /// + /// Unions are similar to interfaces but can not contain any fields on + /// their own. + Union, + + /// ## Enum types + /// + /// Like scalars, enum types appear as the leaf nodes of GraphQL queries. 
+ Enum, + + /// ## Input objects + /// + /// Represents complex values provided in queries _into_ the system. + InputObject, + + /// ## List types + /// + /// Represent lists of other types. This library provides implementations + /// for vectors and slices, but other Rust types can be extended to serve + /// as GraphQL lists. + List, + + /// ## Non-null types + /// + /// In GraphQL, nullable types are the default. By putting a `!` after a + /// type, it becomes non-nullable. + NonNull, +} + +/// Field argument container +pub struct Arguments { + args: Option>, +} + +impl Arguments { + #[doc(hidden)] + pub fn new(mut args: Option>, meta_args: &Option>) -> Arguments { + if meta_args.is_some() && args.is_none() { + args = Some(HashMap::new()); + } + + if let (&mut Some(ref mut args), &Some(ref meta_args)) = (&mut args, meta_args) { + for arg in meta_args { + if !args.contains_key(&arg.name) { + if let Some(ref default_value) = arg.default_value { + args.insert(arg.name.clone(), default_value.clone()); + } + } + } + } + + Arguments { + args: args + } + } + + /// Get and convert an argument into the desired type. + /// + /// If the argument is found, or a default argument has been provided, + /// the `InputValue` will be converted into the type `T`. + /// + /// Returns `Some` if the argument is present _and_ type conversion + /// succeeeds. + pub fn get(&self, key: &str) -> Option where T: FromInputValue { + match self.args { + Some(ref args) => match args.get(key) { + Some(v) => Some(v.convert().unwrap()), + None => None, + }, + None => None, + } + } +} + +/** +Primary trait used to expose Rust types in a GraphQL schema + +All of the convenience macros ultimately expand into an implementation of +this trait for the given type. The macros remove duplicated definitions of +fields and arguments, and add type checks on all resolve functions +automatically. This can all be done manually. + +`GraphQLType` provides _some_ convenience methods for you, in the form of +optional trait methods. The `name` and `meta` methods are mandatory, but +other than that, it depends on what type you're exposing: + +* Scalars, enums, lists and non null wrappers only require `resolve`, +* Interfaces and objects require `resolve_field` _or_ `resolve` if you want + to implement custom resolution logic (probably not), +* Interfaces and unions require `resolve_into_type` and `concrete_type_name`. +* Input objects do not require anything + +## Example + +Manually deriving an object is straightforward but tedious. This is the +equivalent of the `User` object as shown in the example in the documentation +root: + +```rust +use juniper::{GraphQLType, Registry, FieldResult, + Arguments, Executor, ExecutionResult}; +use juniper::meta::MetaType; +# use std::collections::HashMap; + +struct User { id: String, name: String, friend_ids: Vec } +struct Database { users: HashMap } + +impl GraphQLType for User { + fn name() -> Option<&'static str> { + Some("User") + } + + fn meta(registry: &mut Registry) -> MetaType { + // First, we need to define all fields and their types on this type. + // + // If need arguments, want to implement interfaces, or want to add + // documentation strings, we can do it here. 
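        // (Each `Field` that `registry.field` hands back also carries
        // `description`, `arguments` and `deprecation_reason` slots; see the
        // `Registry::field` constructor in `types/schema.rs`. Filling those
        // in here is what the convenience macros automate.)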
+ registry.build_object_type::()(&[ + registry.field::<&String>("id"), + registry.field::<&String>("name"), + registry.field::>("friends"), + ]) + .into_meta() + } + + fn resolve_field( + &self, + field_name: &str, + args: &Arguments, + executor: &mut Executor + ) + -> ExecutionResult + { + // Next, we need to match the queried field name. All arms of this + // match statement return `ExecutionResult`, which makes it hard to + // statically verify that the type you pass on to `executor.resolve` + // actually matches the one that you defined in `meta()` above. + let database = executor.context(); + match field_name { + "id" => executor.resolve(&self.id), + "name" => executor.resolve(&self.name), + + // You pass a vector of User objects to `executor.resolve`, and it + // will determine which fields of the sub-objects to actually + // resolve based on the query. The executor instance keeps track + // of its current position in the query. + "friends" => executor.resolve( + &self.friend_ids.iter() + .filter_map(|id| database.users.get(id)) + .collect::>() + ), + + // We can only reach this panic in two cases; either a mismatch + // between the defined schema in `meta()` above, or a validation + // in this library failed because of a bug. + // + // In either of those two cases, the only reasonable way out is + // to panic the thread. + _ => panic!("Field {} not found on type User", field_name), + } + } +} +``` + +*/ +pub trait GraphQLType: Sized { + /// The name of the GraphQL type to expose. + /// + /// This function will be called multiple times during schema construction. + /// It must _not_ perform any calculation and _always_ return the same + /// value. + fn name() -> Option<&'static str>; + + /// The meta type representing this GraphQL type. + fn meta(registry: &mut Registry) -> MetaType; + + /// Resolve the value of a single field on this type. + /// + /// The arguments object contain all specified arguments, with default + /// values substituted for the ones not provided by the query. + /// + /// The executor can be used to drive selections into sub-objects. + /// + /// The default implementation panics through `unimplemented!()`. + #[allow(unused_variables)] + fn resolve_field(&self, field_name: &str, arguments: &Arguments, executor: &mut Executor) + -> ExecutionResult + { + unimplemented!() + } + + /// Resolve this interface or union into a concrete type + /// + /// Try to resolve the current type into the type name provided. If the + /// type matches, pass the instance along to `executor.resolve`. + /// + /// The default implementation panics through `unimplemented()`. + #[allow(unused_variables)] + fn resolve_into_type(&self, type_name: &str, selection_set: Option>, executor: &mut Executor) -> ExecutionResult { + unimplemented!(); + } + + /// Return the concrete type name for this instance/union. + /// + /// The default implementation panics through `unimplemented()`. + #[allow(unused_variables)] + fn concrete_type_name(&self, context: &CtxT) -> String { + unimplemented!(); + } + + /// Resolve the provided selection set against the current object. + /// + /// For non-object types, the selection set will be `None` and the value + /// of the object should simply be returned. + /// + /// For objects, all fields in the selection set should be resolved. + /// + /// The default implementation uses `resolve_field` to resolve all fields, + /// including those through fragment expansion, for object types. For + /// non-object types, this method panics through `unimplemented!()`. 
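To connect the `resolve_field` contract documented above with the `Arguments` accessor defined earlier, here is a hedged sketch of a single resolver arm. It assumes an `impl GraphQLType<Database> for Query` block in the style of the worked `User` example above, with an `id: String` argument already registered for the field in `meta()` (argument registration itself is elided); the `"human"` field mirrors the test schema, everything else is illustrative.

```rust
// Inside an `impl GraphQLType<Database> for Query` block, in the style of the
// worked `User` example above:
fn resolve_field(&self, field_name: &str, args: &Arguments, executor: &mut Executor<Database>)
    -> ExecutionResult
{
    // The context lives for the whole execution, so taking it first does not
    // conflict with the later mutable use of the executor.
    let database = executor.context();

    match field_name {
        "human" => {
            // `get` converts the raw `InputValue` through `FromInputValue`;
            // defaults declared in `meta()` have already been substituted.
            let id: String = args.get("id")
                .expect("id is registered as a non-null argument");
            executor.resolve(&database.get_human(&id))
        }

        _ => panic!("Field {} not found on type Query", field_name),
    }
}
```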
+ fn resolve(&self, selection_set: Option>, executor: &mut Executor) -> Value { + if let Some(selection_set) = selection_set { + let mut result = HashMap::new(); + resolve_selection_set_into(self, selection_set, executor, &mut result); + Value::object(result) + } + else { + unimplemented!(); + } + } +} + +fn resolve_selection_set_into( + instance: &T, + selection_set: Vec, + executor: &mut Executor, + result: &mut HashMap) + where T: GraphQLType +{ + let meta_type = executor.schema() + .concrete_type_by_name(T::name().expect("Resolving named type's selection set")) + .expect("Type not found in schema"); + + for selection in selection_set { + match selection { + Selection::Field(Spanning { item: f, start: start_pos, .. }) => { + if is_excluded( + &match f.directives { + Some(sel) => Some(sel.iter().cloned().map(|s| s.item).collect()), + None => None, + }, + executor.variables()) { + continue; + } + + let response_name = &f.alias.as_ref().unwrap_or(&f.name).item; + + if &f.name.item == "__typename" { + result.insert( + response_name.clone(), + Value::string( + instance.concrete_type_name(executor.context()))); + continue; + } + + let meta_field = meta_type.field_by_name(&f.name.item) + .expect(&format!("Field {} not found on type {:?}", f.name.item, meta_type.name())); + + let exec_vars = executor.variables(); + + let mut sub_exec = executor.sub_executor( + Some(response_name.clone()), + start_pos.clone(), + f.selection_set); + + let field_result = instance.resolve_field( + &f.name.item, + &Arguments::new( + f.arguments.map(|m| + m.item.into_iter().map(|(k, v)| + (k.item, v.item.into_const(exec_vars))).collect()), + &meta_field.arguments), + &mut sub_exec); + + match field_result { + Ok(v) => { result.insert(response_name.clone(), v); } + Err(e) => { sub_exec.push_error(e, start_pos); } + } + }, + Selection::FragmentSpread(Spanning { item: spread, .. }) => { + if is_excluded( + &match spread.directives { + Some(sel) => Some(sel.iter().cloned().map(|s| s.item).collect()), + None => None, + }, + executor.variables()) { + continue; + } + + let fragment = &executor.fragment_by_name(&spread.name.item) + .expect("Fragment could not be found"); + + resolve_selection_set_into( + instance, fragment.selection_set.clone(), executor, result); + }, + Selection::InlineFragment(Spanning { item: fragment, start: start_pos, .. 
}) => { + if is_excluded( + &match fragment.directives { + Some(sel) => Some(sel.iter().cloned().map(|s| s.item).collect()), + None => None + }, + executor.variables()) { + continue; + } + + let mut sub_exec = executor.sub_executor( + None, + start_pos.clone(), + Some(fragment.selection_set.clone())); + + if let Some(type_condition) = fragment.type_condition { + let sub_result = instance.resolve_into_type( + &type_condition.item, + Some(fragment.selection_set.clone()), + &mut sub_exec); + + if let Ok(Value::Object(mut hash_map)) = sub_result { + for (k, v) in hash_map.drain() { + result.insert(k, v); + } + } + else if let Err(e) = sub_result { + sub_exec.push_error(e, start_pos); + } + } + else { + resolve_selection_set_into( + instance, + fragment.selection_set.clone(), + &mut sub_exec, + result); + } + }, + } + } +} + +fn is_excluded(directives: &Option>, vars: &HashMap) -> bool { + if let Some(ref directives) = *directives { + for directive in directives { + let condition: bool = directive.arguments.iter() + .flat_map(|m| m.item.get("if")) + .flat_map(|v| v.item.clone().into_const(vars).convert()) + .next().unwrap(); + + if directive.name.item == "skip" && condition { + return true + } + else if directive.name.item == "include" && !condition { + return true + } + } + + false + } + else { + false + } +} diff --git a/src/types/containers.rs b/src/types/containers.rs new file mode 100644 index 00000000..92d56e78 --- /dev/null +++ b/src/types/containers.rs @@ -0,0 +1,108 @@ +use ast::{InputValue, ToInputValue, FromInputValue, Selection}; +use value::Value; +use schema::meta::MetaType; + +use types::schema::{Executor, Registry}; +use types::base::{GraphQLType}; + +impl GraphQLType for Option where T: GraphQLType { + fn name() -> Option<&'static str> { + None + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_nullable_type::().into_meta() + } + + fn resolve(&self, _: Option>, executor: &mut Executor) -> Value { + match *self { + Some(ref obj) => executor.resolve_into_value(obj), + None => Value::null(), + } + } +} + +impl FromInputValue for Option where T: FromInputValue { + fn from(v: &InputValue) -> Option> { + match v { + &InputValue::Null => None, + v => match v.convert() { + Some(x) => Some(Some(x)), + None => None, + } + } + } +} + +impl ToInputValue for Option where T: ToInputValue { + fn to(&self) -> InputValue { + match *self { + Some(ref v) => v.to(), + None => InputValue::null(), + } + } +} + + +impl GraphQLType for Vec where T: GraphQLType { + fn name() -> Option<&'static str> { + None + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_list_type::().into_meta() + } + + fn resolve(&self, _: Option>, executor: &mut Executor) -> Value { + Value::list( + self.iter().map(|e| executor.resolve_into_value(e)).collect() + ) + } +} + +impl FromInputValue for Vec where T: FromInputValue { + fn from(v: &InputValue) -> Option> { + match *v { + InputValue::List(ref ls) => { + let v: Vec<_> = ls.iter().filter_map(|i| i.item.convert()).collect(); + + if v.len() == ls.len() { + Some(v) + } + else { + None + } + }, + _ => None, + } + } +} + +impl ToInputValue for Vec where T: ToInputValue { + fn to(&self) -> InputValue { + InputValue::list(self.iter().map(|v| v.to()).collect()) + } +} + + +impl<'a, T, CtxT> GraphQLType for &'a [T] where T: GraphQLType { + fn name() -> Option<&'static str> { + None + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_list_type::().into_meta() + } + + fn resolve(&self, _: Option>, executor: &mut Executor) 
-> Value { + Value::list( + self.iter().map(|e| executor.resolve_into_value(e)).collect() + ) + } +} + +impl<'a, T> ToInputValue for &'a [T] where T: ToInputValue { + fn to(&self) -> InputValue { + InputValue::list(self.iter().map(|v| v.to()).collect()) + } +} diff --git a/src/types/mod.rs b/src/types/mod.rs new file mode 100644 index 00000000..568a6150 --- /dev/null +++ b/src/types/mod.rs @@ -0,0 +1,8 @@ +pub mod base; +pub mod schema; +pub mod scalars; +pub mod pointers; +pub mod containers; +pub mod utilities; + +pub use self::schema::execute_validated_query; diff --git a/src/types/pointers.rs b/src/types/pointers.rs new file mode 100644 index 00000000..63f65b70 --- /dev/null +++ b/src/types/pointers.rs @@ -0,0 +1,73 @@ +use ast::{Selection, InputValue, ToInputValue, FromInputValue}; +use value::Value; + +use schema::meta::MetaType; +use types::schema::{Executor, Registry, ExecutionResult}; +use types::base::{Arguments, GraphQLType}; + +impl GraphQLType for Box where T: GraphQLType { + fn name() -> Option<&'static str> { + T::name() + } + + fn meta(registry: &mut Registry) -> MetaType { + T::meta(registry) + } + + fn resolve_into_type(&self, name: &str, selection_set: Option>, executor: &mut Executor) -> ExecutionResult { + (**self).resolve_into_type(name, selection_set, executor) + } + + fn resolve_field(&self, field: &str, args: &Arguments, executor: &mut Executor) -> ExecutionResult + { + (**self).resolve_field(field, args, executor) + } + + fn resolve(&self, selection_set: Option>, executor: &mut Executor) -> Value { + (**self).resolve(selection_set, executor) + } +} + +impl FromInputValue for Box where T: FromInputValue { + fn from(v: &InputValue) -> Option> { + match ::from(v) { + Some(v) => Some(Box::new(v)), + None => None, + } + } +} + +impl ToInputValue for Box where T: ToInputValue { + fn to(&self) -> InputValue { + (**self).to() + } +} + +impl<'a, T, CtxT> GraphQLType for &'a T where T: GraphQLType { + fn name() -> Option<&'static str> { + T::name() + } + + fn meta(registry: &mut Registry) -> MetaType { + T::meta(registry) + } + + fn resolve_into_type(&self, name: &str, selection_set: Option>, executor: &mut Executor) -> ExecutionResult { + (**self).resolve_into_type(name, selection_set, executor) + } + + fn resolve_field(&self, field: &str, args: &Arguments, executor: &mut Executor) -> ExecutionResult + { + (**self).resolve_field(field, args, executor) + } + + fn resolve(&self, selection_set: Option>, executor: &mut Executor) -> Value { + (**self).resolve(selection_set, executor) + } +} + +impl<'a, T> ToInputValue for &'a T where T: ToInputValue { + fn to(&self) -> InputValue { + (**self).to() + } +} diff --git a/src/types/scalars.rs b/src/types/scalars.rs new file mode 100644 index 00000000..4f7b2074 --- /dev/null +++ b/src/types/scalars.rs @@ -0,0 +1,121 @@ +use ast::{InputValue, Selection, FromInputValue, ToInputValue}; +use value::Value; + +use schema::meta::MetaType; + +use types::schema::{Executor, Registry}; +use types::base::GraphQLType; + +/// An ID as defined by the GraphQL specification +/// +/// Represented as a string, but can be converted _to_ from an integer as well. 
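A small sketch of that conversion from the input side. It assumes `ID` and `InputValue` are reachable from the crate root like the other public types, and uses the `convert` helper on `InputValue` that `Arguments::get` relies on:

```rust
extern crate juniper;

use juniper::{ID, InputValue};

fn main() {
    // A string input converts straight into an ID...
    let from_string: Option<ID> = InputValue::string("1000").convert();
    assert!(from_string.is_some());

    // ...an integer input converts too (it is formatted into its string
    // representation, see `from_input_value` below), while anything else,
    // such as a null, yields `None`.
    let from_null: Option<ID> = InputValue::null().convert();
    assert!(from_null.is_none());
}
```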
+pub struct ID(String); + +graphql_scalar!(ID as "ID" { + resolve(&self) -> Value { + Value::string(&self.0) + } + + from_input_value(v: &InputValue) -> Option { + match *v { + InputValue::String(ref s) => Some(ID(s.to_owned())), + InputValue::Int(i) => Some(ID(format!("{}", i))), + _ => None + } + } +}); + + +graphql_scalar!(String as "String" { + resolve(&self) -> Value { + Value::string(self) + } + + from_input_value(v: &InputValue) -> Option { + match *v { + InputValue::String(ref s) => Some(s.clone()), + _ => None, + } + } +}); + + +impl<'a, CtxT> GraphQLType for &'a str { + fn name() -> Option<&'static str> { + Some("String") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_scalar_type::().into_meta() + } + + fn resolve(&self, _: Option>, _: &mut Executor) -> Value { + Value::string(self) + } +} + +impl<'a> ToInputValue for &'a str { + fn to(&self) -> InputValue { + InputValue::string(self) + } +} + + +graphql_scalar!(bool as "Boolean" { + resolve(&self) -> Value { + Value::boolean(*self) + } + + from_input_value(v: &InputValue) -> Option { + match *v { + InputValue::Boolean(b) => Some(b), + _ => None, + } + } +}); + + +graphql_scalar!(i64 as "Int" { + resolve(&self) -> Value { + Value::int(*self) + } + + from_input_value(v: &InputValue) -> Option { + match *v { + InputValue::Int(i) => Some(i), + _ => None, + } + } +}); + + +graphql_scalar!(f64 as "Float" { + resolve(&self) -> Value { + Value::float(*self) + } + + from_input_value(v: &InputValue) -> Option { + match *v { + InputValue::Int(i) => Some(i as f64), + InputValue::Float(f) => Some(f), + _ => None, + } + } +}); + + +impl GraphQLType for () { + fn name() -> Option<&'static str> { + Some("__Unit") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_scalar_type::().into_meta() + } +} + +impl FromInputValue for () { + fn from(_: &InputValue) -> Option<()> { + None + } +} diff --git a/src/types/schema.rs b/src/types/schema.rs new file mode 100644 index 00000000..2c0c4c98 --- /dev/null +++ b/src/types/schema.rs @@ -0,0 +1,421 @@ +use std::collections::HashMap; +use std::marker::PhantomData; + +use ast::{InputValue, ToInputValue, Document, Selection, Fragment, Definition, Type, FromInputValue, OperationType}; +use value::Value; +use parser::SourcePosition; + +use schema::meta::{MetaType, ScalarMeta, ListMeta, NullableMeta, + ObjectMeta, EnumMeta, InterfaceMeta, UnionMeta, + InputObjectMeta, PlaceholderMeta, Field, Argument, + EnumValue}; +use schema::model::{RootNode, SchemaType}; + +use types::base::GraphQLType; + +/// A type registry used to build schemas +/// +/// The registry gathers metadata for all types in a schema. It provides +/// convenience methods to convert types implementing the `GraphQLType` trait +/// into `Type` instances and automatically registers them. +pub struct Registry { + /// Currently registered types + pub types: HashMap, + phantom: PhantomData, +} + +#[derive(Clone)] +pub enum FieldPath<'a> { + Root(SourcePosition), + Field(String, SourcePosition, &'a FieldPath<'a>), +} + +/// Query execution engine +/// +/// The executor helps drive the query execution in a schema. It keeps track +/// of the current field stack, context, variables, and errors. 
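Before the `Executor` internals, a hedged sketch of what a failing resolver looks like from the schema side, written as a variant of the test schema's `human` field in the `graphql_object!` style (the `Database` and `Human` fixtures are the ones from `tests/model.rs`; the failure condition is invented for illustration):

```rust
graphql_object!(Database: Database as "Query" |&self| {
    field human(id: String as "id of the human") -> FieldResult<&Human> {
        // Returning `Err` does not abort the query. The executor records the
        // message together with the source position of this field and the
        // current field path (see `resolve_selection_set_into` above and
        // `push_error` below), then carries on with any sibling fields.
        self.get_human(&id)
            .ok_or_else(|| format!("no human found with id {:?}", id))
    }
});
```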
+pub struct Executor<'a, CtxT> where CtxT: 'a { + fragments: &'a HashMap, + variables: &'a HashMap, + current_selection_set: Option>, + schema: &'a SchemaType, + context: &'a CtxT, + errors: &'a mut Vec, + field_path: FieldPath<'a>, +} + +/// Error type for errors that occur during query execution +/// +/// All execution errors contain the source position in the query of the field +/// that failed to resolve. It also contains the field stack. +#[derive(Debug, PartialOrd, Ord, PartialEq, Eq)] +pub struct ExecutionError { + location: SourcePosition, + path: Vec, + message: String, +} + +/// The result of resolving the value of a field of type `T` +pub type FieldResult = Result; + +/// The result of resolving an unspecified field +pub type ExecutionResult = Result; + +impl<'a, CtxT> Executor<'a, CtxT> { + /// Resolve a single arbitrary value into an `ExecutionResult` + pub fn resolve>(&mut self, value: &T) -> ExecutionResult { + Ok(value.resolve( + match self.current_selection_set { + Some(ref sel) => Some(sel.clone()), + None => None, + }, + self)) + } + + /// Resolve a single arbitrary value into a return value + /// + /// If the field fails to resolve, `null` will be returned. + pub fn resolve_into_value>(&mut self, value: &T) -> Value { + match self.resolve(value) { + Ok(v) => v, + Err(e) => { + let position = self.field_path.location().clone(); + self.push_error(e, position); + Value::null() + }, + } + } + + /// Derive a new executor by replacing the context + /// + /// This can be used to connect different types, e.g. from different Rust + /// libraries, that require different context types. + pub fn replaced_context<'b, NewCtxT>(&'b mut self, ctx: &'b NewCtxT) -> Executor<'b, NewCtxT> { + Executor { + fragments: self.fragments, + variables: self.variables, + current_selection_set: self.current_selection_set.clone(), + schema: self.schema, + context: ctx, + errors: self.errors, + field_path: self.field_path.clone(), + } + } + + #[doc(hidden)] + pub fn sub_executor( + &mut self, + field_name: Option, + location: SourcePosition, + selection_set: Option>, + ) + -> Executor + { + Executor { + fragments: self.fragments, + variables: self.variables, + current_selection_set: selection_set, + schema: self.schema, + context: self.context, + errors: self.errors, + field_path: match field_name { + Some(name) => FieldPath::Field(name, location, &self.field_path), + None => self.field_path.clone(), + }, + } + } + + /// Access the current context + /// + /// You usually provide the context when calling the top-level `execute` + /// function, or using the context factory in the Iron integration. 
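For orientation, here is a minimal sketch of that top-level call, written as another test in the same style as the executor tests earlier in this commit (it assumes the same imports and fixtures as that test module):

```rust
// (Assumes the same `use` items as the surrounding executor tests.)
#[test]
fn sketch_fetch_human_by_variable() {
    let doc = r#"query FetchHuman($someId: String!) { human(id: $someId) { name } }"#;
    let database = Database::new();
    let schema = RootNode::new(&database, ());

    let vars = vec![
        ("someId".to_owned(), InputValue::string("1000")),
    ].into_iter().collect();

    // `execute` parses and validates the document, then resolves it against
    // the query root with `database` as context. The `Ok` payload is the
    // resolved data plus any field-level `ExecutionError`s collected along
    // the way; a parse or validation failure comes back as `Err` instead.
    match ::execute(doc, None, &schema, &vars, &database) {
        Ok((_value, errors)) => assert!(errors.is_empty()),
        Err(_) => panic!("document should parse and validate"),
    }
}
```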
+ pub fn context(&self) -> &'a CtxT { + self.context + } + + /// The currently executing schema + pub fn schema(&self) -> &'a SchemaType { + self.schema + } + + #[doc(hidden)] + pub fn variables(&self) -> &'a HashMap { + self.variables + } + + #[doc(hidden)] + pub fn fragment_by_name(&self, name: &str) -> Option<&'a Fragment> { + self.fragments.get(name) + } + + /// Add an error to the execution engine + pub fn push_error(&mut self, error: String, location: SourcePosition) { + let mut path = Vec::new(); + self.field_path.construct_path(&mut path); + + self.errors.push(ExecutionError { + location: location, + path: path, + message: error, + }); + } +} + +impl<'a> FieldPath<'a> { + fn construct_path(&self, acc: &mut Vec) { + match *self { + FieldPath::Root(_) => (), + FieldPath::Field(ref name, _, ref parent) => { + parent.construct_path(acc); + acc.push(name.clone()); + } + } + } + + fn location(&self) -> &SourcePosition { + match *self { + FieldPath::Root(ref pos) | + FieldPath::Field(_, ref pos, _) => pos + } + } +} + +impl ExecutionError { + /// The error message + pub fn message(&self) -> &str { + &self.message + } + + /// The source location _in the query_ of the field that failed to resolve + pub fn location(&self) -> &SourcePosition { + &self.location + } + + /// The path of fields leading to the field that generated this error + pub fn path(&self) -> &[String] { + &self.path + } +} + +pub fn execute_validated_query( + document: Document, + operation_name: Option<&str>, + root_node: &RootNode, + variables: &HashMap, + context: &CtxT +) + -> (Value, Vec) + where QueryT: GraphQLType, + MutationT: GraphQLType, +{ + let mut fragments = vec![]; + let mut operation = None; + + for def in document { + match def { + Definition::Operation(op) => { + if operation_name.is_none() && operation.is_some() { + panic!("Must provide operation name if query contains multiple operations"); + } + + let move_op = operation_name.is_none() + || op.item.name.as_ref().map(|s| s.item.as_ref()) == operation_name; + + if move_op { + operation = Some(op); + } + } + Definition::Fragment(f) => fragments.push(f), + }; + } + + let op = operation.expect("Could not find operation to execute"); + let mut errors = Vec::new(); + let value; + + { + let mut executor = Executor { + fragments: &fragments.into_iter().map(|f| (f.item.name.item.clone(), f.item)).collect(), + variables: variables, + current_selection_set: Some(op.item.selection_set), + schema: &root_node.schema, + context: context, + errors: &mut errors, + field_path: FieldPath::Root(op.start), + }; + + value = match op.item.operation_type { + OperationType::Query => executor.resolve_into_value(&root_node), + OperationType::Mutation => executor.resolve_into_value(&root_node.mutation_type), + }; + } + + errors.sort(); + + (value, errors) +} + +impl Registry { + /// Construct a new registry + pub fn new(types: HashMap) -> Registry { + Registry { + types: types, + phantom: PhantomData, + } + } + + /// Get the `Type` instance for a given GraphQL type + /// + /// If the registry hasn't seen a type with this name before, it will + /// construct its metadata and store it. 
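As a hedged sketch of how a hand-written type plugs into this machinery, here is a hypothetical `Permalink` scalar, using the same internal module paths as `types/scalars.rs` (the built-in scalars use the `graphql_scalar!` macro instead of spelling this out):

```rust
use ast::{FromInputValue, InputValue, Selection};
use value::Value;
use schema::meta::MetaType;
use types::schema::{Executor, Registry};
use types::base::GraphQLType;

// A made-up scalar, written out by hand instead of via `graphql_scalar!`.
pub struct Permalink(String);

impl<CtxT> GraphQLType<CtxT> for Permalink {
    fn name() -> Option<&'static str> {
        Some("Permalink")
    }

    fn meta(registry: &mut Registry<CtxT>) -> MetaType {
        // `get_type::<Permalink>()` calls this the first time it sees the
        // name and caches the resulting metadata, as described above.
        registry.build_scalar_type::<Permalink>().into_meta()
    }

    // Scalars only need `resolve`; it turns the Rust value into a `Value`.
    fn resolve(&self, _: Option<Vec<Selection>>, _: &mut Executor<CtxT>) -> Value {
        Value::string(&self.0)
    }
}

impl FromInputValue for Permalink {
    fn from(v: &InputValue) -> Option<Permalink> {
        match *v {
            InputValue::String(ref s) => Some(Permalink(s.clone())),
            _ => None,
        }
    }
}
```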
+ pub fn get_type(&mut self) -> Type where T: GraphQLType { + if let Some(name) = T::name() { + if !self.types.contains_key(name) { + self.insert_placeholder(name, Type::NonNullNamed(name.to_owned())); + let meta = T::meta(self); + self.types.insert(name.to_owned(), meta); + } + self.types[name].as_type() + } + else { + T::meta(self).as_type() + } + } + + /// Create a field with the provided name + pub fn field(&mut self, name: &str) -> Field where T: GraphQLType { + Field { + name: name.to_owned(), + description: None, + arguments: None, + field_type: self.get_type::(), + deprecation_reason: None, + } + } + + #[doc(hidden)] + pub fn field_inside_result(&mut self, name: &str, _: FieldResult) -> Field where T: GraphQLType { + Field { + name: name.to_owned(), + description: None, + arguments: None, + field_type: self.get_type::(), + deprecation_reason: None, + } + } + + /// Create an argument with the provided name + pub fn arg(&mut self, name: &str) -> Argument where T: GraphQLType + FromInputValue { + Argument::new(name, self.get_type::()) + } + + /// Create an argument with a default value + /// + /// When called with type `T`, the actual argument will be given the type + /// `Option`. + pub fn arg_with_default( + &mut self, + name: &str, + value: &T, + ) + -> Argument + where T: GraphQLType + ToInputValue + FromInputValue + { + Argument::new(name, self.get_type::>()) + .default_value(value.to()) + } + + fn insert_placeholder(&mut self, name: &str, of_type: Type) { + if !self.types.contains_key(name) { + self.types.insert( + name.to_owned(), + MetaType::Placeholder(PlaceholderMeta { of_type: of_type })); + } + } + + /// Create a scalar meta type + /// + /// This expects the type to implement `FromInputValue`. + pub fn build_scalar_type(&mut self) + -> ScalarMeta + where T: FromInputValue + GraphQLType + { + let name = T::name().expect("Scalar types must be named. Implement name()"); + ScalarMeta::new::(name) + } + + /// Create a list meta type + pub fn build_list_type>(&mut self) -> ListMeta { + let of_type = self.get_type::(); + ListMeta::new(of_type) + } + + /// Create a nullable meta type + pub fn build_nullable_type>(&mut self) -> NullableMeta { + let of_type = self.get_type::(); + NullableMeta::new(of_type) + } + + /// Create an object meta type builder + /// + /// To prevent infinite recursion by enforcing ordering, this returns a + /// function that needs to be called with the list of fields on the object. + pub fn build_object_type(&mut self) + -> Box ObjectMeta> + where T: GraphQLType + { + let name = T::name().expect("Object types must be named. Implement name()"); + let typename_field = self.field::("__typename"); + + Box::new(move |fs: &[Field]| { + let mut v = fs.to_vec(); + v.push(typename_field.clone()); + ObjectMeta::new(name, &v) + }) + } + + /// Create an enum meta type + pub fn build_enum_type(&mut self) + -> Box EnumMeta> + where T: FromInputValue + GraphQLType + { + let name = T::name().expect("Enum types must be named. Implement name()"); + + Box::new(move |values: &[EnumValue]| EnumMeta::new::(name, values)) + } + + /// Create an interface meta type builder + pub fn build_interface_type(&mut self) + -> Box InterfaceMeta> + where T: GraphQLType + { + let name = T::name().expect("Interface types must be named. 
Implement name()"); + let typename_field = self.field::("__typename"); + + Box::new(move |fs: &[Field]| { + let mut v = fs.to_vec(); + v.push(typename_field.clone()); + InterfaceMeta::new(name, &v) + }) + } + + /// Create a union meta type builder + pub fn build_union_type(&mut self) + -> Box UnionMeta> + where T: GraphQLType + { + let name = T::name().expect("Union types must be named. Implement name()"); + + Box::new(move |ts: &[Type]| UnionMeta::new(name, ts)) + } + + /// Create an input object meta type builder + pub fn build_input_object_type(&mut self) + -> Box InputObjectMeta> + where T: FromInputValue + GraphQLType + { + let name = T::name().expect("Input object types must be named. Implement name()"); + + Box::new(move |args: &[Argument]| InputObjectMeta::new::(name, args)) + } +} diff --git a/src/types/utilities.rs b/src/types/utilities.rs new file mode 100644 index 00000000..d2b16dab --- /dev/null +++ b/src/types/utilities.rs @@ -0,0 +1,67 @@ +use std::collections::HashSet; +use ast::InputValue; +use schema::model::{SchemaType, TypeType}; +use schema::meta::{MetaType, InputObjectMeta}; + +pub fn is_valid_literal_value(schema: &SchemaType, arg_type: &TypeType, arg_value: &InputValue) -> bool { + match *arg_type { + TypeType::NonNull(ref inner) => { + if arg_value.is_null() { + false + } + else { + is_valid_literal_value(schema, inner, arg_value) + } + } + TypeType::List(ref inner) => { + match *arg_value { + InputValue::List(ref items) => items.iter().all(|i| is_valid_literal_value(schema, inner, &i.item)), + ref v => is_valid_literal_value(schema, inner, v), + } + } + TypeType::Concrete(t) => { + match *arg_value { + ref v @ InputValue::Null | + ref v @ InputValue::Int(_) | + ref v @ InputValue::Float(_) | + ref v @ InputValue::String(_) | + ref v @ InputValue::Boolean(_) | + ref v @ InputValue::Enum(_) => { + if let Some(ref parse_fn) = t.input_value_parse_fn() { + parse_fn(&v) + } else { + false + } + }, + InputValue::List(_) => false, + InputValue::Variable(_) => true, + InputValue::Object(ref obj) => { + if let &MetaType::InputObject(InputObjectMeta { ref input_fields, .. 
}) = t { + let mut remaining_required_fields = input_fields.iter() + .filter_map(|f| if f.arg_type.is_non_null() { Some(&f.name) } else { None }) + .collect::>(); + + let all_types_ok = obj.iter().all(|&(ref key, ref value)| { + remaining_required_fields.remove(&key.item); + if let Some(ref arg_type) = input_fields.iter() + .filter(|f| f.name == key.item) + .map(|f| schema.make_type(&f.arg_type)) + .next() + { + is_valid_literal_value(schema, arg_type, &value.item) + } + else { + false + } + }); + + all_types_ok && remaining_required_fields.is_empty() + } + else { + false + } + } + } + } + } +} diff --git a/src/validation/context.rs b/src/validation/context.rs new file mode 100644 index 00000000..99994ec8 --- /dev/null +++ b/src/validation/context.rs @@ -0,0 +1,174 @@ +use std::collections::HashSet; + +use ast::{Document, Definition, Type}; + +use schema::meta::MetaType; +use schema::model::SchemaType; + +use parser::SourcePosition; + +/// Query validation error +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct RuleError { + locations: Vec, + message: String, +} + +#[doc(hidden)] +pub struct ValidatorContext<'a> { + pub schema: &'a SchemaType, + errors: Vec, + type_stack: Vec>, + type_literal_stack: Vec>, + input_type_stack: Vec>, + input_type_literal_stack: Vec>, + parent_type_stack: Vec>, + fragment_names: HashSet, +} + +impl RuleError { + #[doc(hidden)] + pub fn new(message: &str, locations: &[SourcePosition]) -> RuleError { + RuleError { + message: message.to_owned(), + locations: locations.to_vec(), + } + } + + /// Access the message for a validation error + pub fn message(&self) -> &str { + &self.message + } + + /// Access the positions of the validation error + /// + /// All validation errors contain at least one source position, but some + /// validators supply extra context through multiple positions. 
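Rule errors like these are produced by the individual validator rules as they walk the document. A hedged sketch of the shape such a rule takes, using a made-up rule written as if it lived beside the built-in ones under `src/validation/rules/` and calling the `report_error` helper defined just below:

```rust
use ast::Field;
use parser::Spanning;
use validation::{ValidatorContext, Visitor};

pub struct NoFieldsNamedForbidden;

pub fn factory() -> NoFieldsNamedForbidden {
    NoFieldsNamedForbidden
}

impl<'a> Visitor<'a> for NoFieldsNamedForbidden {
    // Only the hooks a rule cares about need to be implemented; the other
    // `Visitor` methods default to doing nothing.
    fn enter_field(&mut self, ctx: &mut ValidatorContext<'a>, field: &'a Spanning<Field>) {
        if field.item.name.item == "forbidden" {
            ctx.report_error(
                "Fields may not be named \"forbidden\"",
                &[field.item.name.start.clone()]);
        }
    }
}
```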
+ pub fn locations(&self) -> &[SourcePosition] { + &self.locations + } +} + +impl<'a> ValidatorContext<'a> { + #[doc(hidden)] + pub fn new(schema: &'a SchemaType, document: &Document) -> ValidatorContext<'a> { + ValidatorContext { + errors: Vec::new(), + schema: schema, + type_stack: Vec::new(), + type_literal_stack: Vec::new(), + parent_type_stack: Vec::new(), + input_type_stack: Vec::new(), + input_type_literal_stack: Vec::new(), + fragment_names: document.iter() + .filter_map(|def| match *def { + Definition::Fragment(ref frag) => Some(frag.item.name.item.clone()), + _ => None, + }) + .collect() + } + } + + #[doc(hidden)] + pub fn append_errors(&mut self, mut errors: Vec) { + self.errors.append(&mut errors); + } + + #[doc(hidden)] + pub fn report_error(&mut self, message: &str, locations: &[SourcePosition]) { + self.errors.push(RuleError::new(message, locations)) + } + + #[doc(hidden)] + pub fn into_errors(mut self) -> Vec { + self.errors.sort(); + self.errors + } + + #[doc(hidden)] + pub fn with_pushed_type(&mut self, t: Option<&Type>, f: F) + -> R + where F: FnOnce(&mut ValidatorContext<'a>) -> R + { + if let Some(t) = t { + self.type_stack.push(self.schema.concrete_type_by_name(t.innermost_name())); + } + else { + self.type_stack.push(None); + } + + self.type_literal_stack.push(t.map(|t| t.clone())); + + let res = f(self); + + self.type_literal_stack.pop(); + self.type_stack.pop(); + + res + } + + #[doc(hidden)] + pub fn with_pushed_parent_type(&mut self, f: F) + -> R + where F: FnOnce(&mut ValidatorContext<'a>) -> R + { + self.parent_type_stack.push(*self.type_stack.last().unwrap_or(&None)); + let res = f(self); + self.parent_type_stack.pop(); + + res + } + + #[doc(hidden)] + pub fn with_pushed_input_type(&mut self, t: Option<&Type>, f: F) + -> R + where F: FnOnce(&mut ValidatorContext<'a>) -> R + { + if let Some(t) = t { + self.input_type_stack.push(self.schema.concrete_type_by_name(t.innermost_name())); + } + else { + self.input_type_stack.push(None); + } + + self.input_type_literal_stack.push(t.map(|t| t.clone())); + + let res = f(self); + + self.input_type_literal_stack.pop(); + self.input_type_stack.pop(); + + res + } + + #[doc(hidden)] + pub fn current_type(&self) -> Option<&'a MetaType> { + *self.type_stack.last().unwrap_or(&None) + } + + #[doc(hidden)] + pub fn current_type_literal(&self) -> Option<&Type> { + match self.type_literal_stack.last() { + Some(&Some(ref t)) => Some(t), + _ => None + } + } + + #[doc(hidden)] + pub fn parent_type(&self) -> Option<&'a MetaType> { + *self.parent_type_stack.last().unwrap_or(&None) + } + + #[doc(hidden)] + pub fn current_input_type_literal(&self) -> Option<&Type> { + match self.input_type_literal_stack.last() { + Some(&Some(ref t)) => Some(t), + _ => None, + } + } + + #[doc(hidden)] + pub fn is_known_fragment(&self, name: &str) -> bool { + self.fragment_names.contains(name) + } +} diff --git a/src/validation/mod.rs b/src/validation/mod.rs new file mode 100644 index 00000000..7f7b58b3 --- /dev/null +++ b/src/validation/mod.rs @@ -0,0 +1,21 @@ +//! 
Query validation related methods and data structures + +mod visitor; +mod traits; +mod context; +mod multi_visitor; +mod rules; + +#[cfg(test)] +mod test_harness; + +pub use self::traits::Visitor; +pub use self::visitor::visit; +pub use self::context::{RuleError, ValidatorContext}; +pub use self::rules::visit_all_rules; +pub use self::multi_visitor::MultiVisitor; + +#[cfg(test)] +pub use self::test_harness::{ + expect_passes_rule, expect_fails_rule, + expect_passes_rule_with_schema, expect_fails_rule_with_schema}; diff --git a/src/validation/multi_visitor.rs b/src/validation/multi_visitor.rs new file mode 100644 index 00000000..8fb7d3d8 --- /dev/null +++ b/src/validation/multi_visitor.rs @@ -0,0 +1,160 @@ +use ast::{Document, Operation, Fragment, VariableDefinition, Selection, + Directive, InputValue, Field, FragmentSpread, InlineFragment}; +use parser::Spanning; +use validation::{ValidatorContext, Visitor}; + +#[doc(hidden)] +pub struct MultiVisitor<'a> { + visitors: Vec + 'a>> +} + +impl<'a> MultiVisitor<'a> { + #[doc(hidden)] + pub fn new(visitors: Vec + 'a>>) -> MultiVisitor<'a> { + MultiVisitor { + visitors: visitors + } + } + + fn visit_all + 'a>) -> ()>(&mut self, mut f: F) { + for mut v in self.visitors.iter_mut() { + f(v); + } + } +} + +impl<'a> Visitor<'a> for MultiVisitor<'a> { + fn enter_document(&mut self, ctx: &mut ValidatorContext<'a>, doc: &'a Document) { + self.visit_all(|v| v.enter_document(ctx, doc)); + } + + fn exit_document(&mut self, ctx: &mut ValidatorContext<'a>, doc: &'a Document) { + self.visit_all(|v| v.exit_document(ctx, doc)); + } + + fn enter_operation_definition(&mut self, ctx: &mut ValidatorContext<'a>, op: &'a Spanning) { + self.visit_all(|v| v.enter_operation_definition(ctx, op)); + } + fn exit_operation_definition(&mut self, ctx: &mut ValidatorContext<'a>, op: &'a Spanning) { + self.visit_all(|v| v.exit_operation_definition(ctx, op)); + } + + fn enter_fragment_definition(&mut self, ctx: &mut ValidatorContext<'a>, f: &'a Spanning) { + self.visit_all(|v| v.enter_fragment_definition(ctx, f)); + } + fn exit_fragment_definition(&mut self, ctx: &mut ValidatorContext<'a>, f: &'a Spanning) { + self.visit_all(|v| v.exit_fragment_definition(ctx, f)); + } + + fn enter_variable_definition(&mut self, ctx: &mut ValidatorContext<'a>, def: &'a (Spanning, VariableDefinition)) { + self.visit_all(|v| v.enter_variable_definition(ctx, def)); + } + fn exit_variable_definition(&mut self, ctx: &mut ValidatorContext<'a>, def: &'a (Spanning, VariableDefinition)) { + self.visit_all(|v| v.exit_variable_definition(ctx, def)); + } + + fn enter_directive(&mut self, ctx: &mut ValidatorContext<'a>, d: &'a Spanning) { + self.visit_all(|v| v.enter_directive(ctx, d)); + } + fn exit_directive(&mut self, ctx: &mut ValidatorContext<'a>, d: &'a Spanning) { + self.visit_all(|v| v.exit_directive(ctx, d)); + } + + fn enter_argument(&mut self, ctx: &mut ValidatorContext<'a>, arg: &'a (Spanning, Spanning)) { + self.visit_all(|v| v.enter_argument(ctx, arg)); + } + fn exit_argument(&mut self, ctx: &mut ValidatorContext<'a>, arg: &'a (Spanning, Spanning)) { + self.visit_all(|v| v.exit_argument(ctx, arg)); + } + + fn enter_selection_set(&mut self, ctx: &mut ValidatorContext<'a>, s: &'a Vec) { + self.visit_all(|v| v.enter_selection_set(ctx, s)); + } + fn exit_selection_set(&mut self, ctx: &mut ValidatorContext<'a>, s: &'a Vec) { + self.visit_all(|v| v.exit_selection_set(ctx, s)); + } + + fn enter_field(&mut self, ctx: &mut ValidatorContext<'a>, f: &'a Spanning) { + self.visit_all(|v| v.enter_field(ctx, 
f)); + } + fn exit_field(&mut self, ctx: &mut ValidatorContext<'a>, f: &'a Spanning) { + self.visit_all(|v| v.exit_field(ctx, f)); + } + + fn enter_fragment_spread(&mut self, ctx: &mut ValidatorContext<'a>, s: &'a Spanning) { + self.visit_all(|v| v.enter_fragment_spread(ctx, s)); + } + fn exit_fragment_spread(&mut self, ctx: &mut ValidatorContext<'a>, s: &'a Spanning) { + self.visit_all(|v| v.exit_fragment_spread(ctx, s)); + } + + fn enter_inline_fragment(&mut self, ctx: &mut ValidatorContext<'a>, f: &'a Spanning) { + self.visit_all(|v| v.enter_inline_fragment(ctx, f)); + } + fn exit_inline_fragment(&mut self, ctx: &mut ValidatorContext<'a>, f: &'a Spanning) { + self.visit_all(|v| v.exit_inline_fragment(ctx, f)); + } + + fn enter_int_value(&mut self, ctx: &mut ValidatorContext<'a>, i: Spanning) { + self.visit_all(|v| v.enter_int_value(ctx, i.clone())); + } + fn exit_int_value(&mut self, ctx: &mut ValidatorContext<'a>, i: Spanning) { + self.visit_all(|v| v.exit_int_value(ctx, i.clone())); + } + + fn enter_float_value(&mut self, ctx: &mut ValidatorContext<'a>, f: Spanning) { + self.visit_all(|v| v.enter_float_value(ctx, f.clone())); + } + fn exit_float_value(&mut self, ctx: &mut ValidatorContext<'a>, f: Spanning) { + self.visit_all(|v| v.exit_float_value(ctx, f.clone())); + } + + fn enter_string_value(&mut self, ctx: &mut ValidatorContext<'a>, s: Spanning<&'a String>) { + self.visit_all(|v| v.enter_string_value(ctx, s.clone())); + } + fn exit_string_value(&mut self, ctx: &mut ValidatorContext<'a>, s: Spanning<&'a String>) { + self.visit_all(|v| v.exit_string_value(ctx, s.clone())); + } + + fn enter_boolean_value(&mut self, ctx: &mut ValidatorContext<'a>, b: Spanning) { + self.visit_all(|v| v.enter_boolean_value(ctx, b.clone())); + } + fn exit_boolean_value(&mut self, ctx: &mut ValidatorContext<'a>, b: Spanning) { + self.visit_all(|v| v.exit_boolean_value(ctx, b.clone())); + } + + fn enter_enum_value(&mut self, ctx: &mut ValidatorContext<'a>, s: Spanning<&'a String>) { + self.visit_all(|v| v.enter_enum_value(ctx, s.clone())); + } + fn exit_enum_value(&mut self, ctx: &mut ValidatorContext<'a>, s: Spanning<&'a String>) { + self.visit_all(|v| v.exit_enum_value(ctx, s.clone())); + } + + fn enter_variable_value(&mut self, ctx: &mut ValidatorContext<'a>, s: Spanning<&'a String>) { + self.visit_all(|v| v.enter_variable_value(ctx, s.clone())); + } + fn exit_variable_value(&mut self, ctx: &mut ValidatorContext<'a>, s: Spanning<&'a String>) { + self.visit_all(|v| v.exit_variable_value(ctx, s.clone())); + } + + fn enter_list_value(&mut self, ctx: &mut ValidatorContext<'a>, l: Spanning<&'a Vec>>) { + self.visit_all(|v| v.enter_list_value(ctx, l.clone())); + } + fn exit_list_value(&mut self, ctx: &mut ValidatorContext<'a>, l: Spanning<&'a Vec>>) { + self.visit_all(|v| v.exit_list_value(ctx, l.clone())); + } + + fn enter_object_value(&mut self, ctx: &mut ValidatorContext<'a>, o: Spanning<&'a Vec<(Spanning, Spanning)>>) { + self.visit_all(|v| v.enter_object_value(ctx, o.clone())); + } + fn exit_object_value(&mut self, ctx: &mut ValidatorContext<'a>, o: Spanning<&'a Vec<(Spanning, Spanning)>>) { + self.visit_all(|v| v.exit_object_value(ctx, o.clone())); + } + + fn enter_object_field(&mut self, ctx: &mut ValidatorContext<'a>, f: &'a (Spanning, Spanning)) { + self.visit_all(|v| v.enter_object_field(ctx, f)); + } + fn exit_object_field(&mut self, ctx: &mut ValidatorContext<'a>, f: &'a (Spanning, Spanning)) { + self.visit_all(|v| v.exit_object_field(ctx, f)); + } +} diff --git 
a/src/validation/rules/arguments_of_correct_type.rs b/src/validation/rules/arguments_of_correct_type.rs new file mode 100644 index 00000000..b924174f --- /dev/null +++ b/src/validation/rules/arguments_of_correct_type.rs @@ -0,0 +1,913 @@ +use ast::{Field, InputValue, Directive}; +use schema::meta::Argument; +use types::utilities::is_valid_literal_value; +use parser::Spanning; +use validation::{Visitor, ValidatorContext}; + +pub struct ArgumentsOfCorrectType<'a> { + current_args: Option<&'a Vec>, +} + +pub fn factory<'a>() -> ArgumentsOfCorrectType<'a> { + ArgumentsOfCorrectType { + current_args: None, + } +} + +impl<'a> Visitor<'a> for ArgumentsOfCorrectType<'a> { + fn enter_directive(&mut self, ctx: &mut ValidatorContext<'a>, directive: &'a Spanning) { + self.current_args = ctx.schema + .directive_by_name(&directive.item.name.item) + .map(|d| &d.arguments); + } + + fn exit_directive(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.current_args = None; + } + + fn enter_field(&mut self, ctx: &mut ValidatorContext<'a>, field: &'a Spanning) { + self.current_args = ctx.parent_type() + .and_then(|t| t.field_by_name(&field.item.name.item)) + .and_then(|f| f.arguments.as_ref()); + } + + fn exit_field(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.current_args = None; + } + + fn enter_argument(&mut self, ctx: &mut ValidatorContext<'a>, &(ref arg_name, ref arg_value): &'a (Spanning, Spanning)) { + if let Some(argument_meta) = self.current_args + .and_then(|args| args.iter().filter(|a| a.name == arg_name.item).next()) + { + let meta_type = ctx.schema.make_type(&argument_meta.arg_type); + + if !is_valid_literal_value(&ctx.schema, &meta_type, &arg_value.item) { + ctx.report_error( + &error_message(&arg_name.item, &format!("{}", argument_meta.arg_type)), + &[arg_value.start.clone()]); + } + } + } +} + +fn error_message(arg_name: &str, type_name: &str) -> String { + format!( + "Invalid value for argument \"{}\", expected type \"{}\"", + arg_name, type_name) +} + + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn good_int_value() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + intArgField(intArg: 2) + } + } + "#); + } + + #[test] + fn good_boolean_value() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + booleanArgField(booleanArg: true) + } + } + "#); + } + + #[test] + fn good_string_value() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + stringArgField(stringArg: "foo") + } + } + "#); + } + + #[test] + fn good_float_value() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + floatArgField(floatArg: 1.1) + } + } + "#); + } + + #[test] + fn int_into_float() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + floatArgField(floatArg: 1) + } + } + "#); + } + + #[test] + fn int_into_id() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + idArgField(idArg: 1) + } + } + "#); + } + + #[test] + fn string_into_id() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + idArgField(idArg: "someIdString") + } + } + "#); + } + + #[test] + fn good_enum_value() { + expect_passes_rule(factory, r#" + { + dog { + doesKnowCommand(dogCommand: SIT) + } + } + "#); + } + + #[test] + fn int_into_string() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + stringArgField(stringArg: 1) + } + } + "#, + &[ + RuleError::new(&error_message("stringArg", "String"), &[ + 
SourcePosition::new(89, 3, 42), + ]) + ]); + } + + #[test] + fn float_into_string() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + stringArgField(stringArg: 1.0) + } + } + "#, + &[ + RuleError::new(&error_message("stringArg", "String"), &[ + SourcePosition::new(89, 3, 42), + ]) + ]); + } + + #[test] + fn boolean_into_string() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + stringArgField(stringArg: true) + } + } + "#, + &[ + RuleError::new(&error_message("stringArg", "String"), &[ + SourcePosition::new(89, 3, 42), + ]) + ]); + } + + #[test] + fn unquoted_string_into_string() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + stringArgField(stringArg: BAR) + } + } + "#, + &[ + RuleError::new(&error_message("stringArg", "String"), &[ + SourcePosition::new(89, 3, 42), + ]) + ]); + } + + #[test] + fn string_into_int() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + intArgField(intArg: "3") + } + } + "#, + &[ + RuleError::new(&error_message("intArg", "Int"), &[ + SourcePosition::new(83, 3, 36), + ]) + ]); + } + + #[test] + fn unquoted_string_into_int() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + intArgField(intArg: FOO) + } + } + "#, + &[ + RuleError::new(&error_message("intArg", "Int"), &[ + SourcePosition::new(83, 3, 36), + ]) + ]); + } + + #[test] + fn simple_float_into_int() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + intArgField(intArg: 3.0) + } + } + "#, + &[ + RuleError::new(&error_message("intArg", "Int"), &[ + SourcePosition::new(83, 3, 36), + ]) + ]); + } + + #[test] + fn float_into_int() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + intArgField(intArg: 3.333) + } + } + "#, + &[ + RuleError::new(&error_message("intArg", "Int"), &[ + SourcePosition::new(83, 3, 36), + ]) + ]); + } + + #[test] + fn string_into_float() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + floatArgField(floatArg: "3.333") + } + } + "#, + &[ + RuleError::new(&error_message("floatArg", "Float"), &[ + SourcePosition::new(87, 3, 40), + ]) + ]); + } + + #[test] + fn boolean_into_float() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + floatArgField(floatArg: true) + } + } + "#, + &[ + RuleError::new(&error_message("floatArg", "Float"), &[ + SourcePosition::new(87, 3, 40), + ]) + ]); + } + + #[test] + fn unquoted_into_float() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + floatArgField(floatArg: FOO) + } + } + "#, + &[ + RuleError::new(&error_message("floatArg", "Float"), &[ + SourcePosition::new(87, 3, 40), + ]) + ]); + } + + #[test] + fn int_into_boolean() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + booleanArgField(booleanArg: 2) + } + } + "#, + &[ + RuleError::new(&error_message("booleanArg", "Boolean"), &[ + SourcePosition::new(91, 3, 44), + ]) + ]); + } + + #[test] + fn float_into_boolean() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + booleanArgField(booleanArg: 1.0) + } + } + "#, + &[ + RuleError::new(&error_message("booleanArg", "Boolean"), &[ + SourcePosition::new(91, 3, 44), + ]) + ]); + } + + #[test] + fn string_into_boolean() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + booleanArgField(booleanArg: "true") + } + } + "#, + &[ + RuleError::new(&error_message("booleanArg", "Boolean"), &[ + SourcePosition::new(91, 3, 44), + ]) + ]); + } + + #[test] + fn unquoted_into_boolean() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + booleanArgField(booleanArg: TRUE) + } + } + "#, + &[ + 
RuleError::new(&error_message("booleanArg", "Boolean"), &[ + SourcePosition::new(91, 3, 44), + ]) + ]); + } + + #[test] + fn float_into_id() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + idArgField(idArg: 1.0) + } + } + "#, + &[ + RuleError::new(&error_message("idArg", "ID"), &[ + SourcePosition::new(81, 3, 34), + ]) + ]); + } + + #[test] + fn boolean_into_id() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + idArgField(idArg: true) + } + } + "#, + &[ + RuleError::new(&error_message("idArg", "ID"), &[ + SourcePosition::new(81, 3, 34), + ]) + ]); + } + + #[test] + fn unquoted_into_id() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + idArgField(idArg: SOMETHING) + } + } + "#, + &[ + RuleError::new(&error_message("idArg", "ID"), &[ + SourcePosition::new(81, 3, 34), + ]) + ]); + } + + #[test] + fn int_into_enum() { + expect_fails_rule(factory, r#" + { + dog { + doesKnowCommand(dogCommand: 2) + } + } + "#, + &[ + RuleError::new(&error_message("dogCommand", "DogCommand"), &[ + SourcePosition::new(79, 3, 44), + ]) + ]); + } + + #[test] + fn float_into_enum() { + expect_fails_rule(factory, r#" + { + dog { + doesKnowCommand(dogCommand: 1.0) + } + } + "#, + &[ + RuleError::new(&error_message("dogCommand", "DogCommand"), &[ + SourcePosition::new(79, 3, 44), + ]) + ]); + } + + #[test] + fn string_into_enum() { + expect_fails_rule(factory, r#" + { + dog { + doesKnowCommand(dogCommand: "SIT") + } + } + "#, + &[ + RuleError::new(&error_message("dogCommand", "DogCommand"), &[ + SourcePosition::new(79, 3, 44), + ]) + ]); + } + + #[test] + fn boolean_into_enum() { + expect_fails_rule(factory, r#" + { + dog { + doesKnowCommand(dogCommand: true) + } + } + "#, + &[ + RuleError::new(&error_message("dogCommand", "DogCommand"), &[ + SourcePosition::new(79, 3, 44), + ]) + ]); + } + + #[test] + fn unknown_enum_value_into_enum() { + expect_fails_rule(factory, r#" + { + dog { + doesKnowCommand(dogCommand: JUGGLE) + } + } + "#, + &[ + RuleError::new(&error_message("dogCommand", "DogCommand"), &[ + SourcePosition::new(79, 3, 44), + ]) + ]); + } + + #[test] + fn different_case_enum_value_into_enum() { + expect_fails_rule(factory, r#" + { + dog { + doesKnowCommand(dogCommand: sit) + } + } + "#, + &[ + RuleError::new(&error_message("dogCommand", "DogCommand"), &[ + SourcePosition::new(79, 3, 44), + ]) + ]); + } + + #[test] + fn good_list_value() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + stringListArgField(stringListArg: ["one", "two"]) + } + } + "#); + } + + #[test] + fn empty_list_value() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + stringListArgField(stringListArg: []) + } + } + "#); + } + + #[test] + fn single_value_into_list() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + stringListArgField(stringListArg: "one") + } + } + "#); + } + + #[test] + fn incorrect_item_type() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + stringListArgField(stringListArg: ["one", 2]) + } + } + "#, + &[ + RuleError::new(&error_message("stringListArg", "[String]"), &[ + SourcePosition::new(97, 3, 50), + ]) + ]); + } + + #[test] + fn single_value_of_incorrect_type() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + stringListArgField(stringListArg: 1) + } + } + "#, + &[ + RuleError::new(&error_message("stringListArg", "[String]"), &[ + SourcePosition::new(97, 3, 50), + ]) + ]); + } + + #[test] + fn arg_on_optional_arg() { + expect_passes_rule(factory, r#" + { + dog { + isHousetrained(atOtherHomes: true) + } + } + "#); + } + + 
#[test] + fn no_arg_on_optional_arg() { + expect_passes_rule(factory, r#" + { + dog { + isHousetrained + } + } + "#); + } + + #[test] + fn multiple_args() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleReqs(req1: 1, req2: 2) + } + } + "#); + } + + #[test] + fn multiple_args_reverse_order() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleReqs(req2: 2, req1: 1) + } + } + "#); + } + + #[test] + fn no_args_on_multiple_optional() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOpts + } + } + "#); + } + + #[test] + fn one_arg_on_multiple_optional() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOpts(opt1: 1) + } + } + "#); + } + + #[test] + fn second_arg_on_multiple_optional() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOpts(opt2: 1) + } + } + "#); + } + + #[test] + fn multiple_reqs_on_mixed_list() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4) + } + } + "#); + } + + #[test] + fn multiple_reqs_and_one_opt_on_mixed_list() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4, opt1: 5) + } + } + "#); + } + + #[test] + fn all_reqs_and_opts_on_mixed_list() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6) + } + } + "#); + } + + #[test] + fn incorrect_value_type() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + multipleReqs(req2: "two", req1: "one") + } + } + "#, + &[ + RuleError::new(&error_message("req2", "Int!"), &[ + SourcePosition::new(82, 3, 35), + ]), + RuleError::new(&error_message("req1", "Int!"), &[ + SourcePosition::new(95, 3, 48), + ]), + ]); + } + + #[test] + fn incorrect_value_and_missing_argument() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + multipleReqs(req1: "one") + } + } + "#, + &[ + RuleError::new(&error_message("req1", "Int!"), &[ + SourcePosition::new(82, 3, 35), + ]), + ]); + } + + #[test] + fn optional_arg_despite_required_field_in_type() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + complexArgField + } + } + "#); + } + + #[test] + fn partial_object_only_required() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + complexArgField(complexArg: { requiredField: true }) + } + } + "#); + } + + #[test] + fn partial_object_required_field_can_be_falsy() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + complexArgField(complexArg: { requiredField: false }) + } + } + "#); + } + + #[test] + fn partial_object_including_required() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + complexArgField(complexArg: { requiredField: true, intField: 4 }) + } + } + "#); + } + + #[test] + fn full_object() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + complexArgField(complexArg: { + requiredField: true, + intField: 4, + stringField: "foo", + booleanField: false, + stringListField: ["one", "two"] + }) + } + } + "#); + } + + #[test] + fn full_object_with_fields_in_different_order() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + complexArgField(complexArg: { + stringListField: ["one", "two"], + booleanField: false, + requiredField: true, + stringField: "foo", + intField: 4, + }) + } + } + "#); + } + + #[test] + fn partial_object_missing_required() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + complexArgField(complexArg: { intField: 4 }) + } + } + "#, + &[ + RuleError::new(&error_message("complexArg", 
"ComplexInput"), &[ + SourcePosition::new(91, 3, 44), + ]), + ]); + } + + #[test] + fn partial_object_invalid_field_type() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + complexArgField(complexArg: { + stringListField: ["one", 2], + requiredField: true, + }) + } + } + "#, + &[ + RuleError::new(&error_message("complexArg", "ComplexInput"), &[ + SourcePosition::new(91, 3, 44), + ]), + ]); + } + + #[test] + fn partial_object_unknown_field_arg() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + complexArgField(complexArg: { + requiredField: true, + unknownField: "value" + }) + } + } + "#, + &[ + RuleError::new(&error_message("complexArg", "ComplexInput"), &[ + SourcePosition::new(91, 3, 44), + ]), + ]); + } + + #[test] + fn directive_with_valid_types() { + expect_passes_rule(factory, r#" + { + dog @include(if: true) { + name + } + human @skip(if: false) { + name + } + } + "#); + } + + #[test] + fn directive_with_incorrect_types() { + expect_fails_rule(factory, r#" + { + dog @include(if: "yes") { + name @skip(if: ENUM) + } + } + "#, + &[ + RuleError::new(&error_message("if", "Boolean!"), &[ + SourcePosition::new(38, 2, 27), + ]), + RuleError::new(&error_message("if", "Boolean!"), &[ + SourcePosition::new(74, 3, 27), + ]), + ]); + } +} diff --git a/src/validation/rules/default_values_of_correct_type.rs b/src/validation/rules/default_values_of_correct_type.rs new file mode 100644 index 00000000..f2ecdce7 --- /dev/null +++ b/src/validation/rules/default_values_of_correct_type.rs @@ -0,0 +1,154 @@ +use ast::VariableDefinition; +use types::utilities::is_valid_literal_value; +use parser::Spanning; +use validation::{Visitor, ValidatorContext}; + +pub struct DefaultValuesOfCorrectType { +} + +pub fn factory() -> DefaultValuesOfCorrectType { + DefaultValuesOfCorrectType { + } +} + +impl<'a> Visitor<'a> for DefaultValuesOfCorrectType { + fn enter_variable_definition(&mut self, ctx: &mut ValidatorContext<'a>, &(ref var_name, ref var_def): &'a (Spanning, VariableDefinition)) { + if let Some(Spanning { item: ref var_value, ref start, .. }) = var_def.default_value { + if var_def.var_type.item.is_non_null() { + ctx.report_error( + &non_null_error_message(&var_name.item, &format!("{}", var_def.var_type.item)), + &[start.clone()]) + } + else { + let meta_type = ctx.schema.make_type(&var_def.var_type.item); + + if !is_valid_literal_value(&ctx.schema, &meta_type, var_value) { + ctx.report_error( + &type_error_message(&var_name.item, &format!("{}", var_def.var_type.item)), + &[start.clone()]); + } + } + } + } +} + +fn type_error_message(arg_name: &str, type_name: &str) -> String { + format!( + "Invalid default value for argument \"{}\", expected type \"{}\"", + arg_name, type_name) +} + +fn non_null_error_message(arg_name: &str, type_name: &str) -> String { + format!( + "Argument \"{}\" has type \"{}\" and is not nullable, so it't can't have a default value", + arg_name, type_name) +} + +#[cfg(test)] +mod tests { + use super::{type_error_message, non_null_error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn variables_with_no_default_values() { + expect_passes_rule(factory, r#" + query NullableValues($a: Int, $b: String, $c: ComplexInput) { + dog { name } + } + "#); + } + + #[test] + fn required_variables_without_default_values() { + expect_passes_rule(factory, r#" + query RequiredValues($a: Int!, $b: String!) 
{ + dog { name } + } + "#); + } + + #[test] + fn variables_with_valid_default_values() { + expect_passes_rule(factory, r#" + query WithDefaultValues( + $a: Int = 1, + $b: String = "ok", + $c: ComplexInput = { requiredField: true, intField: 3 } + ) { + dog { name } + } + "#); + } + + #[test] + fn no_required_variables_with_default_values() { + expect_fails_rule(factory, r#" + query UnreachableDefaultValues($a: Int! = 3, $b: String! = "default") { + dog { name } + } + "#, + &[ + RuleError::new(&non_null_error_message("a", "Int!"), &[ + SourcePosition::new(53, 1, 52), + ]), + RuleError::new(&non_null_error_message("b", "String!"), &[ + SourcePosition::new(70, 1, 69), + ]), + ]); + } + + #[test] + fn variables_with_invalid_default_values() { + expect_fails_rule(factory, r#" + query InvalidDefaultValues( + $a: Int = "one", + $b: String = 4, + $c: ComplexInput = "notverycomplex" + ) { + dog { name } + } + "#, + &[ + RuleError::new(&type_error_message("a", "Int"), &[ + SourcePosition::new(61, 2, 22), + ]), + RuleError::new(&type_error_message("b", "String"), &[ + SourcePosition::new(93, 3, 25), + ]), + RuleError::new(&type_error_message("c", "ComplexInput"), &[ + SourcePosition::new(127, 4, 31), + ]), + ]); + } + + #[test] + fn complex_variables_missing_required_field() { + expect_fails_rule(factory, r#" + query MissingRequiredField($a: ComplexInput = {intField: 3}) { + dog { name } + } + "#, + &[ + RuleError::new(&type_error_message("a", "ComplexInput"), &[ + SourcePosition::new(57, 1, 56), + ]), + ]); + } + + #[test] + fn list_variables_with_invalid_item() { + expect_fails_rule(factory, r#" + query InvalidItem($a: [String] = ["one", 2]) { + dog { name } + } + "#, + &[ + RuleError::new(&type_error_message("a", "[String]"), &[ + SourcePosition::new(44, 1, 43), + ]), + ]); + } + +} diff --git a/src/validation/rules/fields_on_correct_type.rs b/src/validation/rules/fields_on_correct_type.rs new file mode 100644 index 00000000..df814083 --- /dev/null +++ b/src/validation/rules/fields_on_correct_type.rs @@ -0,0 +1,272 @@ +use ast::Field; +use validation::{Visitor, ValidatorContext}; +use parser::Spanning; + +pub struct FieldsOnCorrectType {} + +pub fn factory() -> FieldsOnCorrectType { + FieldsOnCorrectType {} +} + +impl<'a> Visitor<'a> for FieldsOnCorrectType { + fn enter_field(&mut self, context: &mut ValidatorContext<'a>, field: &'a Spanning) { + { + if let Some(parent_type) = context.parent_type() { + let field_name = &field.item.name; + let type_name = parent_type.name().clone().unwrap_or(""); + + if parent_type.field_by_name(&field_name.item).is_none() { + context.report_error( + &error_message(&field_name.item, &type_name), + &[field_name.start.clone()]); + } + } + } + } +} + +fn error_message(field: &str, type_name: &str) -> String { + format!(r#"Unknown field "{}" on type "{}""#, field, type_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn selection_on_object() { + expect_passes_rule(factory, r#" + fragment objectFieldSelection on Dog { + __typename + name + } + "#); + } + + #[test] + fn aliased_selection_on_object() { + expect_passes_rule(factory, r#" + fragment aliasedObjectFieldSelection on Dog { + tn : __typename + otherName : name + } + "#); + } + + #[test] + fn selection_on_interface() { + expect_passes_rule(factory, r#" + fragment interfaceFieldSelection on Pet { + __typename + name + } + "#); + } + + #[test] + fn 
aliased_selection_on_interface() { + expect_passes_rule(factory, r#" + fragment interfaceFieldSelection on Pet { + otherName : name + } + "#); + } + + #[test] + fn lying_alias_selection() { + expect_passes_rule(factory, r#" + fragment lyingAliasSelection on Dog { + name : nickname + } + "#); + } + + #[test] + fn ignores_unknown_type() { + expect_passes_rule(factory, r#" + fragment unknownSelection on UnknownType { + unknownField + } + "#); + } + + #[test] + fn nested_unknown_fields() { + expect_fails_rule(factory, r#" + fragment typeKnownAgain on Pet { + unknown_pet_field { + ... on Cat { + unknown_cat_field + } + } + } + "#, + &[ + RuleError::new(&error_message("unknown_pet_field", "Pet"), &[ + SourcePosition::new(56, 2, 12) + ]), + RuleError::new(&error_message("unknown_cat_field", "Cat"), &[ + SourcePosition::new(119, 4, 16) + ]), + ]); + } + + #[test] + fn unknown_field_on_fragment() { + expect_fails_rule(factory, r#" + fragment fieldNotDefined on Dog { + meowVolume + } + "#, + &[ + RuleError::new(&error_message("meowVolume", "Dog"), &[ + SourcePosition::new(57, 2, 12) + ]), + ]); + } + + #[test] + fn ignores_deeply_unknown_field() { + expect_fails_rule(factory, r#" + fragment deepFieldNotDefined on Dog { + unknown_field { + deeper_unknown_field + } + } + "#, + &[ + RuleError::new(&error_message("unknown_field", "Dog"), &[ + SourcePosition::new(61, 2, 12) + ]), + ]); + } + + #[test] + fn unknown_subfield() { + expect_fails_rule(factory, r#" + fragment subFieldNotDefined on Human { + pets { + unknown_field + } + } + "#, + &[ + RuleError::new(&error_message("unknown_field", "Pet"), &[ + SourcePosition::new(83, 3, 14) + ]), + ]); + } + + #[test] + fn unknown_field_on_inline_fragment() { + expect_fails_rule(factory, r#" + fragment fieldNotDefined on Pet { + ... 
on Dog { + meowVolume + } + } + "#, + &[ + RuleError::new(&error_message("meowVolume", "Dog"), &[ + SourcePosition::new(84, 3, 14) + ]), + ]); + } + + #[test] + fn unknown_aliased_target() { + expect_fails_rule(factory, r#" + fragment aliasedFieldTargetNotDefined on Dog { + volume : mooVolume + } + "#, + &[ + RuleError::new(&error_message("mooVolume", "Dog"), &[ + SourcePosition::new(79, 2, 21) + ]), + ]); + } + + #[test] + fn unknown_aliased_lying_field_target() { + expect_fails_rule(factory, r#" + fragment aliasedLyingFieldTargetNotDefined on Dog { + barkVolume : kawVolume + } + "#, + &[ + RuleError::new(&error_message("kawVolume", "Dog"), &[ + SourcePosition::new(88, 2, 25) + ]), + ]); + } + + #[test] + fn not_defined_on_interface() { + expect_fails_rule(factory, r#" + fragment notDefinedOnInterface on Pet { + tailLength + } + "#, + &[ + RuleError::new(&error_message("tailLength", "Pet"), &[ + SourcePosition::new(63, 2, 12) + ]), + ]); + } + + #[test] + fn defined_in_concrete_types_but_not_interface() { + expect_fails_rule(factory, r#" + fragment definedOnImplementorsButNotInterface on Pet { + nickname + } + "#, + &[ + RuleError::new(&error_message("nickname", "Pet"), &[ + SourcePosition::new(78, 2, 12) + ]), + ]); + } + + #[test] + fn meta_field_on_union() { + expect_passes_rule(factory, r#" + fragment definedOnImplementorsButNotInterface on Pet { + __typename + } + "#); + } + + #[test] + fn fields_on_union() { + expect_fails_rule(factory, r#" + fragment definedOnImplementorsQueriedOnUnion on CatOrDog { + name + } + "#, + &[ + RuleError::new(&error_message("name", "CatOrDog"), &[ + SourcePosition::new(82, 2, 12) + ]), + ]); + } + + #[test] + fn valid_field_in_inline_fragment() { + expect_passes_rule(factory, r#" + fragment objectFieldSelection on Pet { + ... on Dog { + name + } + ... 
{ + name + } + } + "#); + } + +} diff --git a/src/validation/rules/fragments_on_composite_types.rs b/src/validation/rules/fragments_on_composite_types.rs new file mode 100644 index 00000000..41159516 --- /dev/null +++ b/src/validation/rules/fragments_on_composite_types.rs @@ -0,0 +1,173 @@ +use ast::{Fragment, InlineFragment}; +use parser::Spanning; +use validation::{Visitor, ValidatorContext}; + +pub struct FragmentsOnCompositeTypes {} + +pub fn factory() -> FragmentsOnCompositeTypes { + FragmentsOnCompositeTypes {} +} + +impl<'a> Visitor<'a> for FragmentsOnCompositeTypes { + fn enter_fragment_definition(&mut self, context: &mut ValidatorContext<'a>, f: &'a Spanning<Fragment>) { + { + if let Some(current_type) = context.current_type() { + if !current_type.is_composite() { + let type_name = current_type.name().clone().unwrap_or(""); + let type_cond = &f.item.type_condition; + + context.report_error( + &error_message( + Some(&f.item.name.item.clone()), + &type_name), + &[type_cond.start.clone()]); + } + } + } + } + + fn enter_inline_fragment(&mut self, context: &mut ValidatorContext<'a>, f: &'a Spanning<InlineFragment>) { + { + if let Some(ref type_cond) = f.item.type_condition { + let invalid_type_name = context.current_type().iter() + .filter(|&t| !t.is_composite()) + .map(|t| t.name().clone().unwrap_or("")) + .next(); + + if let Some(name) = invalid_type_name { + context.report_error( + &error_message(None, &name), + &[type_cond.start.clone()]); + } + } + } + } +} + +fn error_message(fragment_name: Option<&str>, on_type: &str) -> String { + if let Some(name) = fragment_name { + format!( + r#"Fragment "{}" cannot condition on non composite type "{}""#, + name, on_type) + } + else { + format!( + r#"Fragment cannot condition on non composite type "{}""#, + on_type) + } +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn on_object() { + expect_passes_rule(factory, r#" + fragment validFragment on Dog { + barks + } + "#); + } + + #[test] + fn on_interface() { + expect_passes_rule(factory, r#" + fragment validFragment on Pet { + name + } + "#); + } + + #[test] + fn on_object_inline() { + expect_passes_rule(factory, r#" + fragment validFragment on Pet { + ... on Dog { + barks + } + } + "#); + } + + #[test] + fn on_inline_without_type_cond() { + expect_passes_rule(factory, r#" + fragment validFragment on Pet { + ... { + name + } + } + "#); + } + + #[test] + fn on_union() { + expect_passes_rule(factory, r#" + fragment validFragment on CatOrDog { + __typename + } + "#); + } + + #[test] + fn not_on_scalar() { + expect_fails_rule(factory, r#" + fragment scalarFragment on Boolean { + bad + } + "#, + &[ + RuleError::new(&error_message(Some("scalarFragment"), "Boolean"), &[ + SourcePosition::new(38, 1, 37), + ]), + ]); + } + + #[test] + fn not_on_enum() { + expect_fails_rule(factory, r#" + fragment scalarFragment on FurColor { + bad + } + "#, + &[ + RuleError::new(&error_message(Some("scalarFragment"), "FurColor"), &[ + SourcePosition::new(38, 1, 37), + ]), + ]); + } + + #[test] + fn not_on_input_object() { + expect_fails_rule(factory, r#" + fragment inputFragment on ComplexInput { + stringField + } + "#, + &[ + RuleError::new(&error_message(Some("inputFragment"), "ComplexInput"), &[ + SourcePosition::new(37, 1, 36), + ]), + ]); + } + + #[test] + fn not_on_scalar_inline() { + expect_fails_rule(factory, r#" + fragment invalidFragment on Pet { + ... 
on String { + barks + } + } + "#, + &[ + RuleError::new(&error_message(None, "String"), &[ + SourcePosition::new(64, 2, 19), + ]), + ]); + } +} diff --git a/src/validation/rules/known_argument_names.rs b/src/validation/rules/known_argument_names.rs new file mode 100644 index 00000000..c9c48ebc --- /dev/null +++ b/src/validation/rules/known_argument_names.rs @@ -0,0 +1,228 @@ +use ast::{Field, InputValue, Directive}; +use schema::meta::Argument; +use parser::Spanning; +use validation::{ValidatorContext, Visitor}; + +#[derive(Debug)] +enum ArgumentPosition<'a> { + Directive(&'a str), + Field(&'a str, &'a str), +} + +pub struct KnownArgumentNames<'a> { + current_args: Option<(ArgumentPosition<'a>, &'a Vec)>, +} + +pub fn factory<'a>() -> KnownArgumentNames<'a> { + KnownArgumentNames { + current_args: None, + } +} + +impl<'a> Visitor<'a> for KnownArgumentNames<'a> { + fn enter_directive(&mut self, ctx: &mut ValidatorContext<'a>, directive: &'a Spanning) { + self.current_args = ctx.schema + .directive_by_name(&directive.item.name.item) + .map(|d| (ArgumentPosition::Directive(&directive.item.name.item), &d.arguments)); + } + + fn exit_directive(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.current_args = None; + } + + fn enter_field(&mut self, ctx: &mut ValidatorContext<'a>, field: &'a Spanning) { + self.current_args = ctx.parent_type() + .and_then(|t| t.field_by_name(&field.item.name.item)) + .and_then(|f| f.arguments.as_ref()) + .map(|args| ( + ArgumentPosition::Field( + &field.item.name.item, + &ctx.parent_type().expect("Parent type should exist") + .name().expect("Parent type should be named")), + args)); + } + + fn exit_field(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.current_args = None; + } + + fn enter_argument(&mut self, ctx: &mut ValidatorContext<'a>, &(ref arg_name, _): &'a (Spanning, Spanning)) { + if let Some((ref pos, args)) = self.current_args { + if args.iter().filter(|a| a.name == arg_name.item).next().is_none() { + let message = match *pos { + ArgumentPosition::Field(ref field_name, ref type_name) => + field_error_message(&arg_name.item, field_name, type_name), + ArgumentPosition::Directive(ref directive_name) => + directive_error_message(&arg_name.item, directive_name), + }; + + ctx.report_error( + &message, + &[arg_name.start.clone()]); + } + } + } +} + +fn field_error_message(arg_name: &str, field_name: &str, type_name: &str) -> String { + format!( + r#"Unknown argument "{}" on field "{}" of type "{}""#, + arg_name, field_name, type_name) +} + +fn directive_error_message(arg_name: &str, directive_name: &str) -> String { + format!( + r#"Unknown argument "{}" on directive "{}""#, + arg_name, directive_name) +} + +#[cfg(test)] +mod tests { + use super::{field_error_message, directive_error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn single_arg_is_known() { + expect_passes_rule(factory, r#" + fragment argOnRequiredArg on Dog { + doesKnowCommand(dogCommand: SIT) + } + "#); + } + + #[test] + fn multiple_args_are_known() { + expect_passes_rule(factory, r#" + fragment multipleArgs on ComplicatedArgs { + multipleReqs(req1: 1, req2: 2) + } + "#); + } + + #[test] + fn ignores_args_of_unknown_fields() { + expect_passes_rule(factory, r#" + fragment argOnUnknownField on Dog { + unknownField(unknownArg: SIT) + } + "#); + } + + #[test] + fn multiple_args_in_reverse_order_are_known() { + expect_passes_rule(factory, r#" + fragment multipleArgsReverseOrder 
on ComplicatedArgs { + multipleReqs(req2: 2, req1: 1) + } + "#); + } + + #[test] + fn no_args_on_optional_arg() { + expect_passes_rule(factory, r#" + fragment noArgOnOptionalArg on Dog { + isHousetrained + } + "#); + } + + #[test] + fn args_are_known_deeply() { + expect_passes_rule(factory, r#" + { + dog { + doesKnowCommand(dogCommand: SIT) + } + human { + pet { + ... on Dog { + doesKnowCommand(dogCommand: SIT) + } + } + } + } + "#); + } + + #[test] + fn directive_args_are_known() { + expect_passes_rule(factory, r#" + { + dog @skip(if: true) + } + "#); + } + + #[test] + fn undirective_args_are_invalid() { + expect_fails_rule(factory, r#" + { + dog @skip(unless: true) + } + "#, + &[ + RuleError::new(&directive_error_message("unless", "skip"), &[ + SourcePosition::new(35, 2, 22), + ]), + ]); + } + + #[test] + fn invalid_arg_name() { + expect_fails_rule(factory, r#" + fragment invalidArgName on Dog { + doesKnowCommand(unknown: true) + } + "#, + &[ + RuleError::new(&field_error_message("unknown", "doesKnowCommand", "Dog"), &[ + SourcePosition::new(72, 2, 28), + ]), + ]); + } + + #[test] + fn unknown_args_amongst_known_args() { + expect_fails_rule(factory, r#" + fragment oneGoodArgOneInvalidArg on Dog { + doesKnowCommand(whoknows: 1, dogCommand: SIT, unknown: true) + } + "#, + &[ + RuleError::new(&field_error_message("whoknows", "doesKnowCommand", "Dog"), &[ + SourcePosition::new(81, 2, 28), + ]), + RuleError::new(&field_error_message("unknown", "doesKnowCommand", "Dog"), &[ + SourcePosition::new(111, 2, 58), + ]), + ]); + } + + #[test] + fn unknown_args_deeply() { + expect_fails_rule(factory, r#" + { + dog { + doesKnowCommand(unknown: true) + } + human { + pet { + ... on Dog { + doesKnowCommand(unknown: true) + } + } + } + } + "#, + &[ + RuleError::new(&field_error_message("unknown", "doesKnowCommand", "Dog"), &[ + SourcePosition::new(61, 3, 30), + ]), + RuleError::new(&field_error_message("unknown", "doesKnowCommand", "Dog"), &[ + SourcePosition::new(193, 8, 34), + ]), + ]); + } +} diff --git a/src/validation/rules/known_directives.rs b/src/validation/rules/known_directives.rs new file mode 100644 index 00000000..f000a79a --- /dev/null +++ b/src/validation/rules/known_directives.rs @@ -0,0 +1,216 @@ +use ast::{Directive, Operation, OperationType, Fragment, FragmentSpread, Field, InlineFragment}; +use validation::{ValidatorContext, Visitor}; +use schema::model::DirectiveLocation; +use parser::Spanning; + +pub struct KnownDirectives { + location_stack: Vec, +} + +pub fn factory() -> KnownDirectives { + KnownDirectives { + location_stack: Vec::new(), + } +} + +impl<'a> Visitor<'a> for KnownDirectives { + fn enter_operation_definition(&mut self, _: &mut ValidatorContext<'a>, op: &'a Spanning) { + self.location_stack.push(match op.item.operation_type { + OperationType::Query => DirectiveLocation::Query, + OperationType::Mutation => DirectiveLocation::Mutation, + }); + } + + fn exit_operation_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + let top = self.location_stack.pop(); + assert!(top == Some(DirectiveLocation::Query) || top == Some(DirectiveLocation::Mutation)); + } + + fn enter_field(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.location_stack.push(DirectiveLocation::Field); + } + + fn exit_field(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + let top = self.location_stack.pop(); + assert_eq!(top, Some(DirectiveLocation::Field)); + } + + fn enter_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + 
self.location_stack.push(DirectiveLocation::FragmentDefinition); + } + + fn exit_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + let top = self.location_stack.pop(); + assert_eq!(top, Some(DirectiveLocation::FragmentDefinition)); + } + + fn enter_fragment_spread(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.location_stack.push(DirectiveLocation::FragmentSpread); + } + + fn exit_fragment_spread(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + let top = self.location_stack.pop(); + assert_eq!(top, Some(DirectiveLocation::FragmentSpread)); + } + + fn enter_inline_fragment(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.location_stack.push(DirectiveLocation::InlineFragment); + } + + fn exit_inline_fragment(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + let top = self.location_stack.pop(); + assert_eq!(top, Some(DirectiveLocation::InlineFragment)); + } + + fn enter_directive(&mut self, ctx: &mut ValidatorContext<'a>, directive: &'a Spanning) { + let directive_name = &directive.item.name.item; + + if let Some(directive_type) = ctx.schema.directive_by_name(directive_name) { + if let Some(current_location) = self.location_stack.last() { + if directive_type.locations.iter().filter(|l| l == ¤t_location).next().is_none() { + ctx.report_error( + &misplaced_error_message(directive_name, current_location), + &[directive.start.clone()]); + } + } + } + else { + ctx.report_error( + &unknown_error_message(directive_name), + &[directive.start.clone()]); + } + } +} + +fn unknown_error_message(directive_name: &str) -> String { + format!(r#"Unknown directive "{}""#, directive_name) +} + +fn misplaced_error_message(directive_name: &str, location: &DirectiveLocation) -> String { + format!(r#"Directive "{}" may not be used on {}"#, directive_name, location) +} + +#[cfg(test)] +mod tests { + use super::{unknown_error_message, misplaced_error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + use schema::model::DirectiveLocation; + + #[test] + fn with_no_directives() { + expect_passes_rule(factory, r#" + query Foo { + name + ...Frag + } + + fragment Frag on Dog { + name + } + "#); + } + + #[test] + fn with_known_directives() { + expect_passes_rule(factory, r#" + { + dog @include(if: true) { + name + } + human @skip(if: false) { + name + } + } + "#); + } + + #[test] + fn with_unknown_directive() { + expect_fails_rule(factory, r#" + { + dog @unknown(directive: "value") { + name + } + } + "#, + &[ + RuleError::new(&unknown_error_message("unknown"), &[ + SourcePosition::new(29, 2, 16), + ]), + ]); + } + + #[test] + fn with_many_unknown_directives() { + expect_fails_rule(factory, r#" + { + dog @unknown(directive: "value") { + name + } + human @unknown(directive: "value") { + name + pets @unknown(directive: "value") { + name + } + } + } + "#, + &[ + RuleError::new(&unknown_error_message("unknown"), &[ + SourcePosition::new(29, 2, 16), + ]), + RuleError::new(&unknown_error_message("unknown"), &[ + SourcePosition::new(111, 5, 18), + ]), + RuleError::new(&unknown_error_message("unknown"), &[ + SourcePosition::new(180, 7, 19), + ]), + ]); + } + + #[test] + fn with_well_placed_directives() { + expect_passes_rule(factory, r#" + query Foo @onQuery { + name @include(if: true) + ...Frag @include(if: true) + skippedField @skip(if: true) + ...SkippedFrag @skip(if: true) + } + + mutation Bar @onMutation { + someField + } + "#); + } + + #[test] + fn 
with_misplaced_directives() { + expect_fails_rule(factory, r#" + query Foo @include(if: true) { + name @onQuery + ...Frag @onQuery + } + + mutation Bar @onQuery { + someField + } + "#, + &[ + RuleError::new(&misplaced_error_message("include", &DirectiveLocation::Query), &[ + SourcePosition::new(21, 1, 20), + ]), + RuleError::new(&misplaced_error_message("onQuery", &DirectiveLocation::Field), &[ + SourcePosition::new(59, 2, 17), + ]), + RuleError::new(&misplaced_error_message("onQuery", &DirectiveLocation::FragmentSpread), &[ + SourcePosition::new(88, 3, 20), + ]), + RuleError::new(&misplaced_error_message("onQuery", &DirectiveLocation::Mutation), &[ + SourcePosition::new(133, 6, 23), + ]), + ]); + } +} diff --git a/src/validation/rules/known_fragment_names.rs b/src/validation/rules/known_fragment_names.rs new file mode 100644 index 00000000..77d60a5b --- /dev/null +++ b/src/validation/rules/known_fragment_names.rs @@ -0,0 +1,88 @@ +use ast::FragmentSpread; +use validation::{ValidatorContext, Visitor}; +use parser::Spanning; + +pub struct KnownFragmentNames {} + +pub fn factory() -> KnownFragmentNames { + KnownFragmentNames {} +} + +impl<'a> Visitor<'a> for KnownFragmentNames { + fn enter_fragment_spread(&mut self, context: &mut ValidatorContext<'a>, spread: &'a Spanning) { + let spread_name = &spread.item.name; + if !context.is_known_fragment(&spread_name.item) { + context.report_error( + &error_message(&spread_name.item), + &[spread_name.start.clone()]); + } + } +} + +fn error_message(frag_name: &str) -> String { + format!(r#"Unknown fragment: "{}""#, frag_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn known() { + expect_passes_rule(factory, r#" + { + human(id: 4) { + ...HumanFields1 + ... on Human { + ...HumanFields2 + } + ... { + name + } + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + "#); + } + + #[test] + fn unknown() { + expect_fails_rule(factory, r#" + { + human(id: 4) { + ...UnknownFragment1 + ... 
on Human { + ...UnknownFragment2 + } + } + } + fragment HumanFields on Human { + name + ...UnknownFragment3 + } + "#, + &[ + RuleError::new(&error_message("UnknownFragment1"), &[ + SourcePosition::new(57, 3, 17), + ]), + RuleError::new(&error_message("UnknownFragment2"), &[ + SourcePosition::new(122, 5, 19), + ]), + RuleError::new(&error_message("UnknownFragment3"), &[ + SourcePosition::new(255, 11, 15), + ]), + ]); + } +} diff --git a/src/validation/rules/known_type_names.rs b/src/validation/rules/known_type_names.rs new file mode 100644 index 00000000..835e63f8 --- /dev/null +++ b/src/validation/rules/known_type_names.rs @@ -0,0 +1,87 @@ +use ast::{Fragment, InlineFragment, VariableDefinition}; +use validation::{ValidatorContext, Visitor}; +use parser::{SourcePosition, Spanning}; + +pub struct KnownTypeNames {} + +pub fn factory() -> KnownTypeNames { + KnownTypeNames {} +} + +impl<'a> Visitor<'a> for KnownTypeNames { + fn enter_inline_fragment(&mut self, ctx: &mut ValidatorContext<'a>, fragment: &'a Spanning) { + if let Some(ref type_cond) = fragment.item.type_condition { + validate_type(ctx, &type_cond.item, &type_cond.start); + } + } + + fn enter_fragment_definition(&mut self, ctx: &mut ValidatorContext<'a>, fragment: &'a Spanning) { + let type_cond = &fragment.item.type_condition; + validate_type(ctx, &type_cond.item, &type_cond.start); + } + + fn enter_variable_definition(&mut self, ctx: &mut ValidatorContext<'a>, &(_, ref var_def): &'a (Spanning, VariableDefinition)) { + let type_name = var_def.var_type.item.innermost_name(); + validate_type(ctx, &type_name, &var_def.var_type.start); + } +} + +fn validate_type<'a>(ctx: &mut ValidatorContext<'a>, type_name: &str, location: &SourcePosition) { + if ctx.schema.type_by_name(type_name).is_none() { + ctx.report_error( + &error_message(type_name), + &[location.clone()]); + } +} + +fn error_message(type_name: &str) -> String { + format!(r#"Unknown type "{}""#, type_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn known_type_names_are_valid() { + expect_passes_rule(factory, r#" + query Foo($var: String, $required: [String!]!) { + user(id: 4) { + pets { ... on Pet { name }, ...PetFields, ... { name } } + } + } + fragment PetFields on Pet { + name + } + "#); + } + + #[test] + fn unknown_type_names_are_invalid() { + expect_fails_rule(factory, r#" + query Foo($var: JumbledUpLetters) { + user(id: 4) { + name + pets { ... 
on Badger { name }, ...PetFields } + } + } + fragment PetFields on Peettt { + name + } + "#, + &[ + RuleError::new(&error_message("JumbledUpLetters"), &[ + SourcePosition::new(27, 1, 26), + ]), + RuleError::new(&error_message("Badger"), &[ + SourcePosition::new(120, 4, 28), + ]), + RuleError::new(&error_message("Peettt"), &[ + SourcePosition::new(210, 7, 32), + ]), + ]); + } +} diff --git a/src/validation/rules/lone_anonymous_operation.rs b/src/validation/rules/lone_anonymous_operation.rs new file mode 100644 index 00000000..20d3ceb7 --- /dev/null +++ b/src/validation/rules/lone_anonymous_operation.rs @@ -0,0 +1,125 @@ +use ast::{Definition, Document, Operation}; +use validation::{ValidatorContext, Visitor}; +use parser::Spanning; + +pub struct LoneAnonymousOperation { + operation_count: Option, +} + +pub fn factory() -> LoneAnonymousOperation { + LoneAnonymousOperation { + operation_count: None, + } +} + +impl<'a> Visitor<'a> for LoneAnonymousOperation { + fn enter_document(&mut self, _: &mut ValidatorContext<'a>, doc: &'a Document) { + self.operation_count = Some(doc + .iter() + .filter(|d| match **d { + Definition::Operation(_) => true, + Definition::Fragment(_) => false, + }) + .count()); + } + + fn enter_operation_definition(&mut self, ctx: &mut ValidatorContext<'a>, op: &'a Spanning) { + if let Some(operation_count) = self.operation_count { + if operation_count > 1 && op.item.name.is_none() { + ctx.report_error(error_message(), &[op.start.clone()]); + } + } + } +} + +fn error_message() -> &'static str { + "This anonymous operation must be the only defined operation" +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn no_operations() { + expect_passes_rule(factory, r#" + fragment fragA on Type { + field + } + "#); + } + + #[test] + fn one_anon_operation() { + expect_passes_rule(factory, r#" + { + field + } + "#); + } + + #[test] + fn multiple_named_operations() { + expect_passes_rule(factory, r#" + query Foo { + field + } + + query Bar { + field + } + "#); + } + + #[test] + fn anon_operation_with_fragment() { + expect_passes_rule(factory, r#" + { + ...Foo + } + fragment Foo on Type { + field + } + "#); + } + + #[test] + fn multiple_anon_operations() { + expect_fails_rule(factory, r#" + { + fieldA + } + { + fieldB + } + "#, + &[ + RuleError::new(error_message(), &[ + SourcePosition::new(11, 1, 10), + ]), + RuleError::new(error_message(), &[ + SourcePosition::new(54, 4, 10), + ]), + ]); + } + + #[test] + fn anon_operation_with_a_mutation() { + expect_fails_rule(factory, r#" + { + fieldA + } + mutation Foo { + fieldB + } + "#, + &[ + RuleError::new(error_message(), &[ + SourcePosition::new(11, 1, 10), + ]), + ]); + } +} diff --git a/src/validation/rules/mod.rs b/src/validation/rules/mod.rs new file mode 100644 index 00000000..f5aa96b2 --- /dev/null +++ b/src/validation/rules/mod.rs @@ -0,0 +1,59 @@ +mod arguments_of_correct_type; +mod default_values_of_correct_type; +mod fields_on_correct_type; +mod fragments_on_composite_types; +mod known_argument_names; +mod known_directives; +mod known_fragment_names; +mod known_type_names; +mod lone_anonymous_operation; +mod no_fragment_cycles; +mod no_undefined_variables; +mod no_unused_fragments; +mod no_unused_variables; +mod overlapping_fields_can_be_merged; +mod possible_fragment_spreads; +mod provided_non_null_arguments; +mod scalar_leafs; +mod unique_argument_names; +mod unique_fragment_names; +mod 
unique_input_field_names; +mod unique_operation_names; +mod unique_variable_names; +mod variables_are_input_types; +mod variables_in_allowed_position; + +use ast::Document; +use validation::{ValidatorContext, MultiVisitor, visit}; + +#[doc(hidden)] +pub fn visit_all_rules<'a>(ctx: &mut ValidatorContext<'a>, doc: &'a Document) { + let mut mv = MultiVisitor::new(vec![ + Box::new(self::arguments_of_correct_type::factory()), + Box::new(self::default_values_of_correct_type::factory()), + Box::new(self::fields_on_correct_type::factory()), + Box::new(self::fragments_on_composite_types::factory()), + Box::new(self::known_argument_names::factory()), + Box::new(self::known_directives::factory()), + Box::new(self::known_fragment_names::factory()), + Box::new(self::known_type_names::factory()), + Box::new(self::lone_anonymous_operation::factory()), + Box::new(self::no_fragment_cycles::factory()), + Box::new(self::no_undefined_variables::factory()), + Box::new(self::no_unused_fragments::factory()), + Box::new(self::no_unused_variables::factory()), + Box::new(self::overlapping_fields_can_be_merged::factory()), + Box::new(self::possible_fragment_spreads::factory()), + Box::new(self::provided_non_null_arguments::factory()), + Box::new(self::scalar_leafs::factory()), + Box::new(self::unique_argument_names::factory()), + Box::new(self::unique_fragment_names::factory()), + Box::new(self::unique_input_field_names::factory()), + Box::new(self::unique_operation_names::factory()), + Box::new(self::unique_variable_names::factory()), + Box::new(self::variables_are_input_types::factory()), + Box::new(self::variables_in_allowed_position::factory()), + ]); + + visit(&mut mv, ctx, doc); +} diff --git a/src/validation/rules/no_fragment_cycles.rs b/src/validation/rules/no_fragment_cycles.rs new file mode 100644 index 00000000..ade106dc --- /dev/null +++ b/src/validation/rules/no_fragment_cycles.rs @@ -0,0 +1,333 @@ +use std::collections::{HashMap, HashSet}; + +use ast::{Fragment, FragmentSpread, Document}; +use validation::{ValidatorContext, Visitor, RuleError}; +use parser::Spanning; + +pub struct NoFragmentCycles<'a> { + current_fragment: Option<&'a str>, + spreads: HashMap<&'a str, Vec>>, + fragment_order: Vec<&'a str>, +} + +struct CycleDetector<'a> { + visited: HashSet<&'a str>, + spreads: &'a HashMap<&'a str, Vec>>, + path_indices: HashMap<&'a str, usize>, + errors: Vec, +} + +pub fn factory<'a>() -> NoFragmentCycles<'a> { + NoFragmentCycles { + current_fragment: None, + spreads: HashMap::new(), + fragment_order: Vec::new(), + } +} + +impl<'a> Visitor<'a> for NoFragmentCycles<'a> { + fn exit_document(&mut self, ctx: &mut ValidatorContext<'a>, _: &'a Document) { + assert!(self.current_fragment.is_none()); + + let mut detector = CycleDetector { + visited: HashSet::new(), + spreads: &self.spreads, + path_indices: HashMap::new(), + errors: Vec::new(), + }; + + for frag in &self.fragment_order { + if !detector.visited.contains(frag) { + let mut path = Vec::new(); + detector.detect_from(frag, &mut path); + } + } + + ctx.append_errors(detector.errors); + } + + fn enter_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, fragment: &'a Spanning) { + assert!(self.current_fragment.is_none()); + + let fragment_name = &fragment.item.name.item; + self.current_fragment = Some(&fragment_name); + self.fragment_order.push(&fragment_name); + } + + fn exit_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, fragment: &'a Spanning) { + assert_eq!(Some(fragment.item.name.item.as_str()), self.current_fragment); + 
self.current_fragment = None; + } + + fn enter_fragment_spread(&mut self, _: &mut ValidatorContext<'a>, spread: &'a Spanning) { + if let Some(ref current_fragment) = self.current_fragment { + self.spreads + .entry(¤t_fragment) + .or_insert_with(|| vec![]) + .push(Spanning::start_end( + &spread.start.clone(), + &spread.end.clone(), + &spread.item.name.item)); + } + } +} + +impl<'a> CycleDetector<'a> { + fn detect_from(&mut self, from: &'a str, path: &mut Vec<&'a Spanning<&'a str>>) { + self.visited.insert(from); + + if !self.spreads.contains_key(from) { + return; + } + + self.path_indices.insert(from, path.len()); + + for node in &self.spreads[from] { + let name = &node.item; + let index = self.path_indices.get(name).map(|i| *i); + + if let Some(index) = index { + let err_pos = if index < path.len() { + path[index] + } else { + node + }; + + self.errors.push(RuleError::new( + &error_message(name), + &[err_pos.start.clone()])); + } + else if !self.visited.contains(name) { + path.push(node); + self.detect_from(name, path); + path.pop(); + } + } + + self.path_indices.remove(from); + } +} + +fn error_message(frag_name: &str) -> String { + format!(r#"Cannot spread fragment "{}""#, frag_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn single_reference_is_valid() { + expect_passes_rule(factory, r#" + fragment fragA on Dog { ...fragB } + fragment fragB on Dog { name } + "#); + } + + #[test] + fn spreading_twice_is_not_circular() { + expect_passes_rule(factory, r#" + fragment fragA on Dog { ...fragB, ...fragB } + fragment fragB on Dog { name } + "#); + } + + #[test] + fn spreading_twice_indirectly_is_not_circular() { + expect_passes_rule(factory, r#" + fragment fragA on Dog { ...fragB, ...fragC } + fragment fragB on Dog { ...fragC } + fragment fragC on Dog { name } + "#); + } + + #[test] + fn double_spread_within_abstract_types() { + expect_passes_rule(factory, r#" + fragment nameFragment on Pet { + ... on Dog { name } + ... on Cat { name } + } + + fragment spreadsInAnon on Pet { + ... on Dog { ...nameFragment } + ... on Cat { ...nameFragment } + } + "#); + } + + #[test] + fn does_not_false_positive_on_unknown_fragment() { + expect_passes_rule(factory, r#" + fragment nameFragment on Pet { + ...UnknownFragment + } + "#); + } + + #[test] + fn spreading_recursively_within_field_fails() { + expect_fails_rule(factory, r#" + fragment fragA on Human { relatives { ...fragA } }, + "#, + &[ + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(49, 1, 48) + ]), + ]); + } + + #[test] + fn no_spreading_itself_directly() { + expect_fails_rule(factory, r#" + fragment fragA on Dog { ...fragA } + "#, + &[ + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(35, 1, 34) + ]), + ]); + } + + #[test] + fn no_spreading_itself_directly_within_inline_fragment() { + expect_fails_rule(factory, r#" + fragment fragA on Pet { + ... 
on Dog { + ...fragA + } + } + "#, + &[ + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(74, 3, 14) + ]), + ]); + } + + #[test] + fn no_spreading_itself_indirectly() { + expect_fails_rule(factory, r#" + fragment fragA on Dog { ...fragB } + fragment fragB on Dog { ...fragA } + "#, + &[ + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(35, 1, 34) + ]), + ]); + } + + #[test] + fn no_spreading_itself_indirectly_reports_opposite_order() { + expect_fails_rule(factory, r#" + fragment fragB on Dog { ...fragA } + fragment fragA on Dog { ...fragB } + "#, + &[ + RuleError::new(&error_message("fragB"), &[ + SourcePosition::new(35, 1, 34) + ]), + ]); + } + + #[test] + fn no_spreading_itself_indirectly_within_inline_fragment() { + expect_fails_rule(factory, r#" + fragment fragA on Pet { + ... on Dog { + ...fragB + } + } + fragment fragB on Pet { + ... on Dog { + ...fragA + } + } + "#, + &[ + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(74, 3, 14) + ]), + ]); + } + + #[test] + fn no_spreading_itself_deeply() { + expect_fails_rule(factory, r#" + fragment fragA on Dog { ...fragB } + fragment fragB on Dog { ...fragC } + fragment fragC on Dog { ...fragO } + fragment fragX on Dog { ...fragY } + fragment fragY on Dog { ...fragZ } + fragment fragZ on Dog { ...fragO } + fragment fragO on Dog { ...fragP } + fragment fragP on Dog { ...fragA, ...fragX } + "#, + &[ + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(35, 1, 34) + ]), + RuleError::new(&error_message("fragO"), &[ + SourcePosition::new(305, 7, 34) + ]), + ]); + } + + #[test] + fn no_spreading_itself_deeply_two_paths() { + expect_fails_rule(factory, r#" + fragment fragA on Dog { ...fragB, ...fragC } + fragment fragB on Dog { ...fragA } + fragment fragC on Dog { ...fragA } + "#, + &[ + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(35, 1, 34) + ]), + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(45, 1, 44) + ]), + ]); + } + + #[test] + fn no_spreading_itself_deeply_two_paths_alt_traversal_order() { + expect_fails_rule(factory, r#" + fragment fragA on Dog { ...fragC } + fragment fragB on Dog { ...fragC } + fragment fragC on Dog { ...fragA, ...fragB } + "#, + &[ + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(35, 1, 34) + ]), + RuleError::new(&error_message("fragC"), &[ + SourcePosition::new(135, 3, 44) + ]), + ]); + } + + #[test] + fn no_spreading_itself_deeply_and_immediately() { + expect_fails_rule(factory, r#" + fragment fragA on Dog { ...fragB } + fragment fragB on Dog { ...fragB, ...fragC } + fragment fragC on Dog { ...fragA, ...fragB } + "#, + &[ + RuleError::new(&error_message("fragA"), &[ + SourcePosition::new(35, 1, 34) + ]), + RuleError::new(&error_message("fragB"), &[ + SourcePosition::new(80, 2, 34) + ]), + RuleError::new(&error_message("fragB"), &[ + SourcePosition::new(90, 2, 44) + ]), + ]); + } +} diff --git a/src/validation/rules/no_undefined_variables.rs b/src/validation/rules/no_undefined_variables.rs new file mode 100644 index 00000000..3441793a --- /dev/null +++ b/src/validation/rules/no_undefined_variables.rs @@ -0,0 +1,488 @@ +use std::collections::{HashSet, HashMap}; +use ast::{Document, Fragment, FragmentSpread, VariableDefinition, Operation, InputValue}; +use validation::{ValidatorContext, Visitor, RuleError}; +use parser::{SourcePosition, Spanning}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Scope<'a> { + Operation(Option<&'a str>), + Fragment(&'a str), +} + +pub struct 
NoUndefinedVariables<'a> { + defined_variables: HashMap, (SourcePosition, HashSet<&'a str>)>, + used_variables: HashMap, Vec>>, + current_scope: Option>, + spreads: HashMap, Vec<&'a str>>, +} + +pub fn factory<'a>() -> NoUndefinedVariables<'a> { + NoUndefinedVariables { + defined_variables: HashMap::new(), + used_variables: HashMap::new(), + current_scope: None, + spreads: HashMap::new(), + } +} + +impl<'a> NoUndefinedVariables<'a> { + fn find_undef_vars(&'a self, scope: &Scope<'a>, defined: &HashSet<&'a str>, unused: &mut Vec<&'a Spanning<&'a str>>, visited: &mut HashSet>) { + if visited.contains(scope) { + return; + } + + visited.insert(scope.clone()); + + if let Some(used_vars) = self.used_variables.get(scope) { + for var in used_vars { + if !defined.contains(&var.item) { + unused.push(var); + } + } + } + + if let Some(spreads) = self.spreads.get(scope) { + for spread in spreads { + self.find_undef_vars(&Scope::Fragment(spread.clone()), defined, unused, visited); + } + } + } +} + +impl<'a> Visitor<'a> for NoUndefinedVariables<'a> { + fn exit_document(&mut self, ctx: &mut ValidatorContext<'a>, _: &'a Document) { + for (op_name, &(ref pos, ref def_vars)) in &self.defined_variables { + let mut unused = Vec::new(); + let mut visited = HashSet::new(); + self.find_undef_vars(&Scope::Operation(op_name.clone()), &def_vars, &mut unused, &mut visited); + + ctx.append_errors(unused + .into_iter() + .map(|var| RuleError::new( + &error_message(&var.item, op_name.clone()), + &[ + var.start.clone(), + pos.clone() + ])) + .collect()); + } + } + + fn enter_operation_definition(&mut self, _: &mut ValidatorContext<'a>, op: &'a Spanning) { + let op_name = op.item.name.as_ref().map(|s| s.item.as_str()); + self.current_scope = Some(Scope::Operation(op_name)); + self.defined_variables.insert(op_name, (op.start.clone(), HashSet::new())); + } + + fn enter_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, f: &'a Spanning) { + self.current_scope = Some(Scope::Fragment(&f.item.name.item)); + } + + fn enter_fragment_spread(&mut self, _: &mut ValidatorContext<'a>, spread: &'a Spanning) { + if let Some(ref scope) = self.current_scope { + self.spreads.entry(scope.clone()) + .or_insert_with(|| Vec::new()) + .push(&spread.item.name.item); + } + } + + fn enter_variable_definition(&mut self, _: &mut ValidatorContext<'a>, &(ref var_name, _): &'a (Spanning, VariableDefinition)) { + if let Some(Scope::Operation(ref name)) = self.current_scope { + if let Some(&mut (_, ref mut vars)) = self.defined_variables.get_mut(name) { + vars.insert(&var_name.item); + } + } + } + + fn enter_argument(&mut self, _: &mut ValidatorContext<'a>, &(_, ref value): &'a (Spanning, Spanning)) { + if let Some(ref scope) = self.current_scope { + self.used_variables + .entry(scope.clone()) + .or_insert_with(|| Vec::new()) + .append(&mut value.item + .referenced_variables() + .iter() + .map(|&var_name| Spanning::start_end( + &value.start.clone(), + &value.end.clone(), + var_name)) + .collect()); + } + } +} + +fn error_message(var_name: &str, op_name: Option<&str>) -> String { + if let Some(op_name) = op_name { + format!(r#"Variable "${}" is not defined by operation "{}""#, var_name, op_name) + } + else { + format!(r#"Variable "${}" is not defined"#, var_name) + } +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn all_variables_defined() { + expect_passes_rule(factory, r#" + query Foo($a: String, $b: 
String, $c: String) { + field(a: $a, b: $b, c: $c) + } + "#); + } + + #[test] + fn all_variables_deeply_defined() { + expect_passes_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + field(a: $a) { + field(b: $b) { + field(c: $c) + } + } + } + "#); + } + + #[test] + fn all_variables_deeply_defined_in_inline_fragments_defined() { + expect_passes_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + ... on Type { + field(a: $a) { + field(b: $b) { + ... on Type { + field(c: $c) + } + } + } + } + } + "#); + } + + #[test] + fn all_variables_in_fragments_deeply_defined() { + expect_passes_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field(c: $c) + } + "#); + } + + #[test] + fn variable_within_single_fragment_defined_in_multiple_operations() { + expect_passes_rule(factory, r#" + query Foo($a: String) { + ...FragA + } + query Bar($a: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) + } + "#); + } + + #[test] + fn variable_within_fragments_defined_in_operations() { + expect_passes_rule(factory, r#" + query Foo($a: String) { + ...FragA + } + query Bar($b: String) { + ...FragB + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + "#); + } + + #[test] + fn variable_within_recursive_fragment_defined() { + expect_passes_rule(factory, r#" + query Foo($a: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragA + } + } + "#); + } + + #[test] + fn variable_not_defined() { + expect_fails_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + field(a: $a, b: $b, c: $c, d: $d) + } + "#, + &[ + RuleError::new(&error_message("d", Some("Foo")), &[ + SourcePosition::new(101, 2, 42), + SourcePosition::new(11, 1, 10), + ]), + ]); + } + + #[test] + fn variable_not_defined_by_unnamed_query() { + expect_fails_rule(factory, r#" + { + field(a: $a) + } + "#, + &[ + RuleError::new(&error_message("a", None), &[ + SourcePosition::new(34, 2, 21), + SourcePosition::new(11, 1, 10), + ]), + ]); + } + + #[test] + fn multiple_variables_not_defined() { + expect_fails_rule(factory, r#" + query Foo($b: String) { + field(a: $a, b: $b, c: $c) + } + "#, + &[ + RuleError::new(&error_message("a", Some("Foo")), &[ + SourcePosition::new(56, 2, 21), + SourcePosition::new(11, 1, 10), + ]), + RuleError::new(&error_message("c", Some("Foo")), &[ + SourcePosition::new(70, 2, 35), + SourcePosition::new(11, 1, 10), + ]), + ]); + } + + #[test] + fn variable_in_fragment_not_defined_by_unnamed_query() { + expect_fails_rule(factory, r#" + { + ...FragA + } + fragment FragA on Type { + field(a: $a) + } + "#, + &[ + RuleError::new(&error_message("a", None), &[ + SourcePosition::new(102, 5, 21), + SourcePosition::new(11, 1, 10), + ]), + ]); + } + + #[test] + fn variable_in_fragment_not_defined_by_operation() { + expect_fails_rule(factory, r#" + query Foo($a: String, $b: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field(c: $c) + } + "#, + &[ + RuleError::new(&error_message("c", Some("Foo")), &[ + SourcePosition::new(358, 15, 21), + SourcePosition::new(11, 1, 10), + ]), + ]); + } + + #[test] + fn multiple_variables_in_fragments_not_defined() { + expect_fails_rule(factory, r#" + query Foo($b: String) { 
+ ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field(c: $c) + } + "#, + &[ + RuleError::new(&error_message("a", Some("Foo")), &[ + SourcePosition::new(124, 5, 21), + SourcePosition::new(11, 1, 10), + ]), + RuleError::new(&error_message("c", Some("Foo")), &[ + SourcePosition::new(346, 15, 21), + SourcePosition::new(11, 1, 10), + ]), + ]); + } + + #[test] + fn single_variable_in_fragment_not_defined_by_multiple_operations() { + expect_fails_rule(factory, r#" + query Foo($a: String) { + ...FragAB + } + query Bar($a: String) { + ...FragAB + } + fragment FragAB on Type { + field(a: $a, b: $b) + } + "#, + &[ + RuleError::new(&error_message("b", Some("Foo")), &[ + SourcePosition::new(201, 8, 28), + SourcePosition::new(11, 1, 10), + ]), + RuleError::new(&error_message("b", Some("Bar")), &[ + SourcePosition::new(201, 8, 28), + SourcePosition::new(79, 4, 10), + ]), + ]); + } + + #[test] + fn variables_in_fragment_not_defined_by_multiple_operations() { + expect_fails_rule(factory, r#" + query Foo($b: String) { + ...FragAB + } + query Bar($a: String) { + ...FragAB + } + fragment FragAB on Type { + field(a: $a, b: $b) + } + "#, + &[ + RuleError::new(&error_message("a", Some("Foo")), &[ + SourcePosition::new(194, 8, 21), + SourcePosition::new(11, 1, 10), + ]), + RuleError::new(&error_message("b", Some("Bar")), &[ + SourcePosition::new(201, 8, 28), + SourcePosition::new(79, 4, 10), + ]), + ]); + } + + #[test] + fn variable_in_fragment_used_by_other_operation() { + expect_fails_rule(factory, r#" + query Foo($b: String) { + ...FragA + } + query Bar($a: String) { + ...FragB + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + "#, + &[ + RuleError::new(&error_message("a", Some("Foo")), &[ + SourcePosition::new(191, 8, 21), + SourcePosition::new(11, 1, 10), + ]), + RuleError::new(&error_message("b", Some("Bar")), &[ + SourcePosition::new(263, 11, 21), + SourcePosition::new(78, 4, 10), + ]), + ]); + } + + #[test] + fn multiple_undefined_variables_produce_multiple_errors() { + expect_fails_rule(factory, r#" + query Foo($b: String) { + ...FragAB + } + query Bar($a: String) { + ...FragAB + } + fragment FragAB on Type { + field1(a: $a, b: $b) + ...FragC + field3(a: $a, b: $b) + } + fragment FragC on Type { + field2(c: $c) + } + "#, + &[ + RuleError::new(&error_message("a", Some("Foo")), &[ + SourcePosition::new(195, 8, 22), + SourcePosition::new(11, 1, 10), + ]), + RuleError::new(&error_message("b", Some("Bar")), &[ + SourcePosition::new(202, 8, 29), + SourcePosition::new(79, 4, 10), + ]), + RuleError::new(&error_message("a", Some("Foo")), &[ + SourcePosition::new(249, 10, 22), + SourcePosition::new(11, 1, 10), + ]), + RuleError::new(&error_message("b", Some("Bar")), &[ + SourcePosition::new(256, 10, 29), + SourcePosition::new(79, 4, 10), + ]), + RuleError::new(&error_message("c", Some("Foo")), &[ + SourcePosition::new(329, 13, 22), + SourcePosition::new(11, 1, 10), + ]), + RuleError::new(&error_message("c", Some("Bar")), &[ + SourcePosition::new(329, 13, 22), + SourcePosition::new(79, 4, 10), + ]), + ]); + } +} diff --git a/src/validation/rules/no_unused_fragments.rs b/src/validation/rules/no_unused_fragments.rs new file mode 100644 index 00000000..abeff3af --- /dev/null +++ b/src/validation/rules/no_unused_fragments.rs @@ -0,0 +1,249 @@ +use std::collections::{HashSet, HashMap}; + +use ast::{Document, Definition, Operation, Fragment, 
FragmentSpread}; +use validation::{ValidatorContext, Visitor}; +use parser::Spanning; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Scope<'a> { + Operation(Option<&'a str>), + Fragment(&'a str), +} + +pub struct NoUnusedFragments<'a> { + spreads: HashMap, Vec<&'a str>>, + defined_fragments: HashSet>, + current_scope: Option>, +} + +pub fn factory<'a>() -> NoUnusedFragments<'a> { + NoUnusedFragments { + spreads: HashMap::new(), + defined_fragments: HashSet::new(), + current_scope: None, + } +} + +impl<'a> NoUnusedFragments<'a> { + fn find_reachable_fragments(&self, from: &Scope<'a>, result: &mut HashSet<&'a str>) { + if let Scope::Fragment(ref name) = *from { + if result.contains(name) { + return; + } + else { + result.insert(name); + } + } + + if let Some(spreads) = self.spreads.get(from) { + for spread in spreads { + self.find_reachable_fragments(&Scope::Fragment(spread.clone()), result) + } + } + } +} + +impl<'a> Visitor<'a> for NoUnusedFragments<'a> { + fn exit_document(&mut self, ctx: &mut ValidatorContext<'a>, defs: &'a Document) { + let mut reachable = HashSet::new(); + + for def in defs { + if let Definition::Operation(Spanning { item: Operation { ref name, .. }, ..}) = *def { + let op_name = name.as_ref().map(|s| s.item.as_str()); + self.find_reachable_fragments(&Scope::Operation(op_name), &mut reachable); + } + } + + for fragment in &self.defined_fragments { + if !reachable.contains(&fragment.item) { + ctx.report_error( + &error_message(&fragment.item), + &[fragment.start.clone()]); + } + } + } + + fn enter_operation_definition(&mut self, _: &mut ValidatorContext<'a>, op: &'a Spanning) { + let op_name = op.item.name.as_ref().map(|s| s.item.as_ref()); + self.current_scope = Some(Scope::Operation(op_name)); + } + + fn enter_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, f: &'a Spanning) { + self.current_scope = Some(Scope::Fragment(&f.item.name.item)); + self.defined_fragments.insert(Spanning::start_end( + &f.start, + &f.end, + &f.item.name.item)); + } + + fn enter_fragment_spread(&mut self, _: &mut ValidatorContext<'a>, spread: &'a Spanning) { + if let Some(ref scope) = self.current_scope { + self.spreads.entry(scope.clone()) + .or_insert_with(|| Vec::new()) + .push(&spread.item.name.item); + } + } +} + +fn error_message(frag_name: &str) -> String { + format!(r#"Fragment "{}" is never used"#, frag_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn all_fragment_names_are_used() { + expect_passes_rule(factory, r#" + { + human(id: 4) { + ...HumanFields1 + ... 
on Human { + ...HumanFields2 + } + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + "#); + } + + #[test] + fn all_fragment_names_are_used_by_multiple_operations() { + expect_passes_rule(factory, r#" + query Foo { + human(id: 4) { + ...HumanFields1 + } + } + query Bar { + human(id: 4) { + ...HumanFields2 + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + "#); + } + + #[test] + fn contains_unknown_fragments() { + expect_fails_rule(factory, r#" + query Foo { + human(id: 4) { + ...HumanFields1 + } + } + query Bar { + human(id: 4) { + ...HumanFields2 + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + fragment Unused1 on Human { + name + } + fragment Unused2 on Human { + name + } + "#, + &[ + RuleError::new(&error_message("Unused1"), &[ + SourcePosition::new(465, 21, 10), + ]), + RuleError::new(&error_message("Unused2"), &[ + SourcePosition::new(532, 24, 10), + ]), + ]); + } + + #[test] + fn contains_unknown_fragments_with_ref_cycle() { + expect_fails_rule(factory, r#" + query Foo { + human(id: 4) { + ...HumanFields1 + } + } + query Bar { + human(id: 4) { + ...HumanFields2 + } + } + fragment HumanFields1 on Human { + name + ...HumanFields3 + } + fragment HumanFields2 on Human { + name + } + fragment HumanFields3 on Human { + name + } + fragment Unused1 on Human { + name + ...Unused2 + } + fragment Unused2 on Human { + name + ...Unused1 + } + "#, + &[ + RuleError::new(&error_message("Unused1"), &[ + SourcePosition::new(465, 21, 10), + ]), + RuleError::new(&error_message("Unused2"), &[ + SourcePosition::new(555, 25, 10), + ]), + ]); + } + + #[test] + fn contains_unknown_and_undef_fragments() { + expect_fails_rule(factory, r#" + query Foo { + human(id: 4) { + ...bar + } + } + fragment foo on Human { + name + } + "#, + &[ + RuleError::new(&error_message("foo"), &[ + SourcePosition::new(107, 6, 10), + ]), + ]); + } +} diff --git a/src/validation/rules/no_unused_variables.rs b/src/validation/rules/no_unused_variables.rs new file mode 100644 index 00000000..a25e923e --- /dev/null +++ b/src/validation/rules/no_unused_variables.rs @@ -0,0 +1,351 @@ +use std::collections::{HashSet, HashMap}; +use ast::{Document, Fragment, FragmentSpread, VariableDefinition, Operation, InputValue}; +use validation::{ValidatorContext, Visitor, RuleError}; +use parser::Spanning; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Scope<'a> { + Operation(Option<&'a str>), + Fragment(&'a str), +} + +pub struct NoUnusedVariables<'a> { + defined_variables: HashMap, HashSet<&'a Spanning>>, + used_variables: HashMap, Vec<&'a str>>, + current_scope: Option>, + spreads: HashMap, Vec<&'a str>>, +} + +pub fn factory<'a>() -> NoUnusedVariables<'a> { + NoUnusedVariables { + defined_variables: HashMap::new(), + used_variables: HashMap::new(), + current_scope: None, + spreads: HashMap::new(), + } +} + +impl<'a> NoUnusedVariables<'a> { + fn find_used_vars(&self, from: &Scope<'a>, defined: &HashSet<&'a str>, used: &mut HashSet<&'a str>, visited: &mut HashSet>) { + if visited.contains(from) { + return; + } + + visited.insert(from.clone()); + + if let Some(used_vars) = self.used_variables.get(from) { + for var in used_vars { + if defined.contains(var) { + used.insert(var); + } + } + } + + if let 
Some(spreads) = self.spreads.get(from) { + for spread in spreads { + self.find_used_vars(&Scope::Fragment(spread.clone()), defined, used, visited); + } + } + } +} + +impl<'a> Visitor<'a> for NoUnusedVariables<'a> { + fn exit_document(&mut self, ctx: &mut ValidatorContext<'a>, _: &'a Document) { + for (op_name, def_vars) in &self.defined_variables { + let mut used = HashSet::new(); + let mut visited = HashSet::new(); + self.find_used_vars( + &Scope::Operation(op_name.clone()), + &def_vars.iter().map(|def| def.item.as_str()).collect(), + &mut used, + &mut visited); + + ctx.append_errors(def_vars + .iter() + .filter(|var| !used.contains(var.item.as_str())) + .map(|var| RuleError::new( + &error_message(&var.item, op_name.clone()), + &[var.start.clone()])) + .collect()); + } + } + + fn enter_operation_definition(&mut self, _: &mut ValidatorContext<'a>, op: &'a Spanning) { + let op_name = op.item.name.as_ref().map(|s| s.item.as_str()); + self.current_scope = Some(Scope::Operation(op_name.clone())); + self.defined_variables.insert(op_name, HashSet::new()); + } + + fn enter_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, f: &'a Spanning) { + self.current_scope = Some(Scope::Fragment(&f.item.name.item)); + } + + fn enter_fragment_spread(&mut self, _: &mut ValidatorContext<'a>, spread: &'a Spanning) { + if let Some(ref scope) = self.current_scope { + self.spreads.entry(scope.clone()) + .or_insert_with(|| Vec::new()) + .push(&spread.item.name.item); + } + } + + fn enter_variable_definition(&mut self, _: &mut ValidatorContext<'a>, &(ref var_name, _): &'a (Spanning, VariableDefinition)) { + if let Some(Scope::Operation(ref name)) = self.current_scope { + if let Some(vars) = self.defined_variables.get_mut(name) { + vars.insert(var_name); + } + } + } + + fn enter_argument(&mut self, _: &mut ValidatorContext<'a>, &(_, ref value): &'a (Spanning, Spanning)) { + if let Some(ref scope) = self.current_scope { + self.used_variables + .entry(scope.clone()) + .or_insert_with(|| Vec::new()) + .append(&mut value.item.referenced_variables()); + } + } +} + +fn error_message(var_name: &str, op_name: Option<&str>) -> String { + if let Some(op_name) = op_name { + format!(r#"Variable "${}" is not defined by operation "{}""#, var_name, op_name) + } + else { + format!(r#"Variable "${}" is not defined"#, var_name) + } +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn uses_all_variables() { + expect_passes_rule(factory, r#" + query ($a: String, $b: String, $c: String) { + field(a: $a, b: $b, c: $c) + } + "#); + } + + #[test] + fn uses_all_variables_deeply() { + expect_passes_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + field(a: $a) { + field(b: $b) { + field(c: $c) + } + } + } + "#); + } + + #[test] + fn uses_all_variables_deeply_in_inline_fragments() { + expect_passes_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + ... on Type { + field(a: $a) { + field(b: $b) { + ... 
on Type { + field(c: $c) + } + } + } + } + } + "#); + } + + #[test] + fn uses_all_variables_in_fragments() { + expect_passes_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field(c: $c) + } + "#); + } + + #[test] + fn variable_used_by_fragment_in_multiple_operations() { + expect_passes_rule(factory, r#" + query Foo($a: String) { + ...FragA + } + query Bar($b: String) { + ...FragB + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + "#); + } + + #[test] + fn variable_used_by_recursive_fragment() { + expect_passes_rule(factory, r#" + query Foo($a: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragA + } + } + "#); + } + + #[test] + fn variable_not_used() { + expect_fails_rule(factory, r#" + query ($a: String, $b: String, $c: String) { + field(a: $a, b: $b) + } + "#, + &[ + RuleError::new(&error_message("c", None), &[ + SourcePosition::new(42, 1, 41), + ]), + ]); + } + + #[test] + fn multiple_variables_not_used_1() { + expect_fails_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + field(b: $b) + } + "#, + &[ + RuleError::new(&error_message("a", Some("Foo")), &[ + SourcePosition::new(21, 1, 20), + ]), + RuleError::new(&error_message("c", Some("Foo")), &[ + SourcePosition::new(45, 1, 44), + ]), + ]); + } + + #[test] + fn variable_not_used_in_fragment() { + expect_fails_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field + } + "#, + &[ + RuleError::new(&error_message("c", Some("Foo")), &[ + SourcePosition::new(45, 1, 44), + ]), + ]); + } + + #[test] + fn multiple_variables_not_used_2() { + expect_fails_rule(factory, r#" + query Foo($a: String, $b: String, $c: String) { + ...FragA + } + fragment FragA on Type { + field { + ...FragB + } + } + fragment FragB on Type { + field(b: $b) { + ...FragC + } + } + fragment FragC on Type { + field + } + "#, + &[ + RuleError::new(&error_message("a", Some("Foo")), &[ + SourcePosition::new(21, 1, 20), + ]), + RuleError::new(&error_message("c", Some("Foo")), &[ + SourcePosition::new(45, 1, 44), + ]), + ]); + } + + #[test] + fn variable_not_used_by_unreferenced_fragment() { + expect_fails_rule(factory, r#" + query Foo($b: String) { + ...FragA + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + "#, + &[ + RuleError::new(&error_message("b", Some("Foo")), &[ + SourcePosition::new(21, 1, 20), + ]), + ]); + } + + #[test] + fn variable_not_used_by_fragment_used_by_other_operation() { + expect_fails_rule(factory, r#" + query Foo($b: String) { + ...FragA + } + query Bar($a: String) { + ...FragB + } + fragment FragA on Type { + field(a: $a) + } + fragment FragB on Type { + field(b: $b) + } + "#, + &[ + RuleError::new(&error_message("b", Some("Foo")), &[ + SourcePosition::new(21, 1, 20), + ]), + RuleError::new(&error_message("a", Some("Bar")), &[ + SourcePosition::new(88, 4, 20), + ]), + ]); + } +} diff --git a/src/validation/rules/overlapping_fields_can_be_merged.rs b/src/validation/rules/overlapping_fields_can_be_merged.rs new file mode 100644 index 00000000..8aca41a0 --- /dev/null +++ b/src/validation/rules/overlapping_fields_can_be_merged.rs @@ -0,0 +1,1743 
@@ +use std::collections::HashMap; +use std::cell::RefCell; +use std::hash::Hash; +use std::borrow::Borrow; +use ast::{Arguments, Document, Definition, Fragment, FragmentSpread, Selection, Field, Type}; +use validation::{ValidatorContext, Visitor}; +use parser::{SourcePosition, Spanning}; +use schema::meta::{Field as FieldType, MetaType}; + +#[derive(Debug)] +struct Conflict(ConflictReason, Vec, Vec); + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct ConflictReason(String, ConflictReasonMessage); + +#[derive(Debug)] +struct AstAndDef<'a>(Option<&'a str>, &'a Spanning, Option<&'a FieldType>); + +type AstAndDefCollection<'a> = OrderedMap<&'a str, Vec>>; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +enum ConflictReasonMessage { + Message(String), + Nested(Vec), +} + +struct PairSet<'a> { + data: HashMap<&'a str, HashMap<&'a str, bool>>, +} + +struct OrderedMap { + data: HashMap, + insert_order: Vec, +} + +struct OrderedMapIter<'a, K: 'a, V: 'a> { + map: &'a HashMap, + inner: ::std::slice::Iter<'a, K> +} + +impl OrderedMap { + fn new() -> OrderedMap { + OrderedMap { + data: HashMap::new(), + insert_order: Vec::new(), + } + } + + fn iter<'a>(&'a self) -> OrderedMapIter<'a, K, V> { + OrderedMapIter { + map: &self.data, + inner: self.insert_order.iter(), + } + } + + fn get(&self, k: &Q) -> Option<&V> where K: Borrow, Q: Hash + Eq { + self.data.get(k) + } + + fn get_mut(&mut self, k: &Q) -> Option<&mut V> where K: Borrow, Q: Hash + Eq { + self.data.get_mut(k) + } + + fn contains_key(&self, k: &Q) -> bool where K: Borrow, Q: Hash + Eq { + self.data.contains_key(k) + } + + fn insert(&mut self, k: K, v: V) -> Option { + let result = self.data.insert(k.clone(), v); + if result.is_none() { + self.insert_order.push(k); + } + result + } +} + +impl<'a, K: Eq + Hash + 'a, V: 'a> Iterator for OrderedMapIter<'a, K, V> { + type Item = (&'a K, &'a V); + + fn next(&mut self) -> Option { + self.inner.next() + .and_then(|key| self.map.get(key).map(|value| (key, value))) + } +} + +impl<'a> PairSet<'a> { + fn new() -> PairSet<'a> { + PairSet { + data: HashMap::new(), + } + } + + fn contains(&self, a: &'a str, b: &'a str, mutex: bool) -> bool { + if let Some(result) = self.data.get(a).and_then(|s| s.get(b)) { + if !mutex { + !result + } + else { + true + } + } + else { + false + } + } + + fn insert(&mut self, a: &'a str, b: &'a str, mutex: bool) { + self.data.entry(a) + .or_insert_with(|| HashMap::new()) + .insert(b, mutex); + + self.data.entry(b) + .or_insert_with(|| HashMap::new()) + .insert(a, mutex); + } +} + +pub struct OverlappingFieldsCanBeMerged<'a> { + named_fragments: HashMap<&'a str, &'a Fragment>, + compared_fragments: RefCell>, +} + +pub fn factory<'a>() -> OverlappingFieldsCanBeMerged<'a> { + OverlappingFieldsCanBeMerged { + named_fragments: HashMap::new(), + compared_fragments: RefCell::new(PairSet::new()), + } +} + +impl<'a> OverlappingFieldsCanBeMerged<'a> { + fn find_conflicts_within_selection_set( + &self, + parent_type: Option<&'a MetaType>, + selection_set: &'a [Selection], + ctx: &ValidatorContext<'a>, + ) + -> Vec + { + let mut conflicts = Vec::new(); + + let (field_map, fragment_names) = self.get_fields_and_fragment_names(parent_type, selection_set, ctx); + + self.collect_conflicts_within(&mut conflicts, &field_map, ctx); + + for (i, frag_name1) in fragment_names.iter().enumerate() { + self.collect_conflicts_between_fields_and_fragment( + &mut conflicts, + &field_map, + &frag_name1, + false, + ctx); + + for frag_name2 in &fragment_names[i+1..] 
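+ // Starting at i + 1 compares each unordered pair of spread fragments exactly
+ // once, avoiding duplicate and self comparisons.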
{ + self.collect_conflicts_between_fragments( + &mut conflicts, + &frag_name1, + &frag_name2, + false, + ctx); + } + } + + conflicts + } + + fn collect_conflicts_between_fragments( + &self, + conflicts: &mut Vec, + fragment_name1: &'a str, + fragment_name2: &'a str, + mutually_exclusive: bool, + ctx: &ValidatorContext<'a>, + ) + { + if fragment_name1 == fragment_name2 { + return; + } + + let fragment1 = match self.named_fragments.get(fragment_name1) { + Some(f) => f, + None => return, + }; + + let fragment2 = match self.named_fragments.get(fragment_name2) { + Some(f) => f, + None => return, + }; + + { + if self.compared_fragments.borrow().contains(fragment_name1, fragment_name2, mutually_exclusive) { + return; + } + } + + { + self.compared_fragments.borrow_mut().insert(fragment_name1, fragment_name2, mutually_exclusive); + } + + let (field_map1, fragment_names1) = self.get_referenced_fields_and_fragment_names(fragment1, ctx); + let (field_map2, fragment_names2) = self.get_referenced_fields_and_fragment_names(fragment2, ctx); + + self.collect_conflicts_between( + conflicts, + mutually_exclusive, + &field_map1, + &field_map2, + ctx); + + for fragment_name2 in &fragment_names2 { + self.collect_conflicts_between_fragments( + conflicts, + fragment_name1, + fragment_name2, + mutually_exclusive, + ctx); + } + + for fragment_name1 in &fragment_names1 { + self.collect_conflicts_between_fragments( + conflicts, + fragment_name1, + fragment_name2, + mutually_exclusive, + ctx); + } + } + + fn collect_conflicts_between_fields_and_fragment( + &self, + conflicts: &mut Vec, + field_map: &AstAndDefCollection<'a>, + fragment_name: &str, + mutually_exclusive: bool, + ctx: &ValidatorContext<'a>, + ) + { + let fragment = match self.named_fragments.get(fragment_name) { + Some(f) => f, + None => return, + }; + + let (field_map2, fragment_names2) = self.get_referenced_fields_and_fragment_names(fragment, ctx); + + self.collect_conflicts_between( + conflicts, + mutually_exclusive, + field_map, + &field_map2, + ctx); + + for fragment_name2 in fragment_names2 { + self.collect_conflicts_between_fields_and_fragment( + conflicts, + field_map, + &fragment_name2, + mutually_exclusive, + ctx); + } + } + + fn collect_conflicts_between( + &self, + conflicts: &mut Vec, + mutually_exclusive: bool, + field_map1: &AstAndDefCollection<'a>, + field_map2: &AstAndDefCollection<'a>, + ctx: &ValidatorContext<'a>, + ) + { + for (response_name, fields1) in field_map1.iter() { + if let Some(fields2) = field_map2.get(response_name) { + for field1 in fields1 { + for field2 in fields2 { + if let Some(conflict) = self.find_conflict(&response_name, field1, field2, mutually_exclusive, ctx) { + conflicts.push(conflict); + } + } + } + } + } + } + + fn collect_conflicts_within( + &self, + conflicts: &mut Vec, + field_map: &AstAndDefCollection<'a>, + ctx: &ValidatorContext<'a>, + ) + { + for (response_name, fields) in field_map.iter() { + for (i, field1) in fields.iter().enumerate() { + for field2 in &fields[i+1..] 
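+ // Within a single selection set, every unordered pair of fields that share a
+ // response name is checked once for a merge conflict.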
{ + if let Some(conflict) = self.find_conflict(&response_name, field1, field2, false, ctx) { + conflicts.push(conflict); + } + } + } + } + } + + fn find_conflict( + &self, + response_name: &str, + field1: &AstAndDef<'a>, + field2: &AstAndDef<'a>, + parents_mutually_exclusive: bool, + ctx: &ValidatorContext<'a>, + ) + -> Option + { + let AstAndDef(ref parent_type1, ref ast1, ref def1) = *field1; + let AstAndDef(ref parent_type2, ref ast2, ref def2) = *field2; + + let mutually_exclusive = parents_mutually_exclusive + || (parent_type1 != parent_type2 + && self.is_object_type(ctx, parent_type1.clone()) + && self.is_object_type(ctx, parent_type2.clone())); + + if !mutually_exclusive { + let name1 = &ast1.item.name.item; + let name2 = &ast2.item.name.item; + + if name1 != name2 { + return Some(Conflict( + ConflictReason( + response_name.to_owned(), + ConflictReasonMessage::Message(format!( + "{} and {} are different fields", name1, name2))), + vec![ast1.start.clone()], + vec![ast2.start.clone()])); + } + + if !self.is_same_arguments(&ast1.item.arguments, &ast2.item.arguments) { + return Some(Conflict( + ConflictReason( + response_name.to_owned(), + ConflictReasonMessage::Message( + "they have differing arguments".to_owned())), + vec![ast1.start.clone()], + vec![ast2.start.clone()])); + } + } + + let t1 = def1.as_ref().map(|def| &def.field_type); + let t2 = def2.as_ref().map(|def| &def.field_type); + + if let (Some(t1), Some(t2)) = (t1, t2) { + if self.is_type_conflict(ctx, &t1, &t2) { + return Some(Conflict( + ConflictReason( + response_name.to_owned(), + ConflictReasonMessage::Message(format!( + "they return conflicting types {} and {}", t1, t2))), + vec![ast1.start.clone()], + vec![ast2.start.clone()])); + } + } + + if let (&Some(ref s1), &Some(ref s2)) = (&ast1.item.selection_set, &ast2.item.selection_set) { + let conflicts = self.find_conflicts_between_sub_selection_sets( + mutually_exclusive, + t1.map(|t| t.innermost_name()), + &s1, + t2.map(|t| t.innermost_name()), + &s2, + ctx); + + return self.subfield_conflicts( + &conflicts, + response_name, + &ast1.start, + &ast2.start); + } + + None + } + + fn find_conflicts_between_sub_selection_sets( + &self, + mutually_exclusive: bool, + parent_type1: Option<&str>, + selection_set1: &'a [Selection], + parent_type2: Option<&str>, + selection_set2: &'a [Selection], + ctx: &ValidatorContext<'a>, + ) + -> Vec + { + let mut conflicts = Vec::new(); + + let parent_type1 = parent_type1.and_then(|t| ctx.schema.concrete_type_by_name(t)); + let parent_type2 = parent_type2.and_then(|t| ctx.schema.concrete_type_by_name(t)); + + let (field_map1, fragment_names1) = self.get_fields_and_fragment_names(parent_type1, selection_set1, ctx); + let (field_map2, fragment_names2) = self.get_fields_and_fragment_names(parent_type2, selection_set2, ctx); + + self.collect_conflicts_between( + &mut conflicts, + mutually_exclusive, + &field_map1, + &field_map2, + ctx); + + for fragment_name in &fragment_names2 { + self.collect_conflicts_between_fields_and_fragment( + &mut conflicts, + &field_map1, + fragment_name, + mutually_exclusive, + ctx); + } + + for fragment_name in &fragment_names1 { + self.collect_conflicts_between_fields_and_fragment( + &mut conflicts, + &field_map2, + fragment_name, + mutually_exclusive, + ctx); + } + + for fragment_name1 in &fragment_names1 { + for fragment_name2 in &fragment_names2 { + self.collect_conflicts_between_fragments( + &mut conflicts, + fragment_name1, + fragment_name2, + mutually_exclusive, + ctx); + } + } + + conflicts + } + + fn 
subfield_conflicts( + &self, + conflicts: &[Conflict], + response_name: &str, + pos1: &SourcePosition, + pos2: &SourcePosition, + ) + -> Option + { + if conflicts.is_empty() { + return None; + } + + Some(Conflict( + ConflictReason( + response_name.to_owned(), + ConflictReasonMessage::Nested(conflicts.iter().map(|c| c.0.clone()).collect())), + vec![pos1.clone()].into_iter() + .chain(conflicts.iter().flat_map(|&Conflict(_, ref fs1, _)| fs1.clone())) + .collect(), + vec![pos2.clone()].into_iter() + .chain(conflicts.iter().flat_map(|&Conflict(_, _, ref fs2)| fs2.clone())) + .collect())) + } + + fn is_type_conflict( + &self, + ctx: &ValidatorContext<'a>, + t1: &Type, + t2: &Type, + ) + -> bool + { + match (t1, t2) { + (&Type::List(ref inner1), &Type::List(ref inner2)) => + self.is_type_conflict(ctx, inner1, inner2), + (&Type::NonNullList(ref inner1), &Type::NonNullList(ref inner2)) => + self.is_type_conflict(ctx, inner1, inner2), + (&Type::NonNullNamed(ref n1), &Type::NonNullNamed(ref n2)) | + (&Type::Named(ref n1), &Type::Named(ref n2)) => { + let ct1 = ctx.schema.concrete_type_by_name(n1); + let ct2 = ctx.schema.concrete_type_by_name(n2); + + if ct1.map(|ct| ct.is_leaf()).unwrap_or(false) + || ct2.map(|ct| ct.is_leaf()).unwrap_or(false) + { + n1 != n2 + } + else { + false + } + } + _ => true, + } + } + + fn is_same_arguments( + &self, + args1: &Option>, + args2: &Option>, + ) + -> bool + { + match (args1, args2) { + (&None, &None) => true, + (&Some(Spanning { item: ref args1, .. }), &Some(Spanning { item: ref args2, .. })) => { + if args1.len() != args2.len() { + return false; + } + + args1.iter().all(|&(ref n1, ref v1)| { + if let Some(&(_, ref v2)) = args2.iter().filter(|&&(ref n2, _)| n1.item == n2.item).next() { + v1.item.unlocated_eq(&v2.item) + } + else { + false + } + }) + }, + _ => false + } + } + + fn is_object_type( + &self, + ctx: &ValidatorContext<'a>, + type_name: Option<&str>, + ) + -> bool + { + match type_name.and_then(|n| ctx.schema.concrete_type_by_name(n)) { + Some(&MetaType::Object(_)) => true, + _ => false + } + } + + fn get_referenced_fields_and_fragment_names( + &self, + fragment: &'a Fragment, + ctx: &ValidatorContext<'a>, + ) + -> (AstAndDefCollection<'a>, Vec<&'a str>) + { + let fragment_type = ctx.schema.concrete_type_by_name(&fragment.type_condition.item); + + self.get_fields_and_fragment_names(fragment_type, &fragment.selection_set, ctx) + } + + fn get_fields_and_fragment_names( + &self, + parent_type: Option<&'a MetaType>, + selection_set: &'a [Selection], + ctx: &ValidatorContext<'a>, + ) + -> (AstAndDefCollection<'a>, Vec<&'a str>) + { + let mut ast_and_defs = OrderedMap::new(); + let mut fragment_names = Vec::new(); + + self.collect_fields_and_fragment_names(parent_type, selection_set, ctx, &mut ast_and_defs, &mut fragment_names); + + (ast_and_defs, fragment_names) + } + + fn collect_fields_and_fragment_names( + &self, + parent_type: Option<&'a MetaType>, + selection_set: &'a [Selection], + ctx: &ValidatorContext<'a>, + ast_and_defs: &mut AstAndDefCollection<'a>, + fragment_names: &mut Vec<&'a str>, + ) + { + for selection in selection_set { + match *selection { + Selection::Field(ref f) => { + let field_name = &f.item.name.item; + let field_def = parent_type.and_then(|t| t.field_by_name(field_name)); + let response_name = f.item.alias.as_ref().map(|s| &s.item).unwrap_or_else(|| &field_name); + + if !ast_and_defs.contains_key(response_name.as_str()) { + ast_and_defs.insert(response_name, Vec::new()); + } + + 
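+ // Fields are grouped under their response name (the alias if one is given,
+ // otherwise the field name), since that is the key their results would have
+ // to be merged under.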
ast_and_defs.get_mut(response_name.as_str()).unwrap() + .push(AstAndDef(parent_type.and_then(MetaType::name), f, field_def)); + }, + Selection::FragmentSpread(Spanning { item: FragmentSpread { ref name, ..}, ..}) => { + if fragment_names.iter().filter(|n| *n == &name.item).next().is_none() { + fragment_names.push(&name.item); + } + }, + Selection::InlineFragment(Spanning { item: ref inline, .. }) => { + let parent_type = inline.type_condition.as_ref() + .and_then(|cond| ctx.schema.concrete_type_by_name(&cond.item)) + .or(parent_type); + + self.collect_fields_and_fragment_names(parent_type, &inline.selection_set, ctx, ast_and_defs, fragment_names); + } + } + } + } +} + +impl<'a> Visitor<'a> for OverlappingFieldsCanBeMerged<'a> { + fn enter_document(&mut self, _: &mut ValidatorContext<'a>, defs: &'a Document) { + for def in defs { + if let Definition::Fragment(Spanning { ref item, .. }) = *def { + self.named_fragments.insert(&item.name.item, &item); + } + } + } + + fn enter_selection_set(&mut self, ctx: &mut ValidatorContext<'a>, selection_set: &'a Vec) { + for Conflict(ConflictReason(reason_name, reason_msg), mut p1, mut p2) + in self.find_conflicts_within_selection_set(ctx.parent_type(), selection_set, ctx) + { + p1.append(&mut p2); + ctx.report_error(&error_message(&reason_name, &reason_msg), &p1); + } + } +} + +fn error_message(reason_name: &str, reason: &ConflictReasonMessage) -> String { + let suffix = "Use different aliases on the fields to fetch both if this was intentional"; + format!(r#"Fields "{}" conflict because {}. {}"#, reason_name, format_reason(reason), suffix) +} + +fn format_reason(reason: &ConflictReasonMessage) -> String { + match *reason { + ConflictReasonMessage::Message(ref name) => name.clone(), + ConflictReasonMessage::Nested(ref nested) => + nested.iter() + .map(|&ConflictReason(ref name, ref subreason)| + format!( + r#"subfields "{}" conflict because {}"#, + name, + format_reason(subreason))) + .collect::>() + .join(" and ") + } +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory, ConflictReason}; + use super::ConflictReasonMessage::*; + + use types::base::GraphQLType; + use types::schema::Registry; + use types::scalars::ID; + use schema::meta::MetaType; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule, + expect_passes_rule_with_schema, + expect_fails_rule_with_schema}; + + #[test] + fn unique_fields() { + expect_passes_rule(factory, r#" + fragment uniqueFields on Dog { + name + nickname + } + "#); + } + + #[test] + fn identical_fields() { + expect_passes_rule(factory, r#" + fragment mergeIdenticalFields on Dog { + name + name + } + "#); + } + + #[test] + fn identical_fields_with_identical_args() { + expect_passes_rule(factory, r#" + fragment mergeIdenticalFieldsWithIdenticalArgs on Dog { + doesKnowCommand(dogCommand: SIT) + doesKnowCommand(dogCommand: SIT) + } + "#); + } + + #[test] + fn identical_fields_with_identical_directives() { + expect_passes_rule(factory, r#" + fragment mergeSameFieldsWithSameDirectives on Dog { + name @include(if: true) + name @include(if: true) + } + "#); + } + + #[test] + fn different_args_with_different_aliases() { + expect_passes_rule(factory, r#" + fragment differentArgsWithDifferentAliases on Dog { + knowsSit: doesKnowCommand(dogCommand: SIT) + knowsDown: doesKnowCommand(dogCommand: DOWN) + } + "#); + } + + #[test] + fn different_directives_with_different_aliases() { + expect_passes_rule(factory, r#" + fragment differentDirectivesWithDifferentAliases on Dog { + 
nameIfTrue: name @include(if: true) + nameIfFalse: name @include(if: false) + } + "#); + } + + #[test] + fn different_skip_include_directives_accepted() { + expect_passes_rule(factory, r#" + fragment differentDirectivesWithDifferentAliases on Dog { + name @include(if: true) + name @include(if: false) + } + "#); + } + + #[test] + fn same_aliases_with_different_field_targets() { + expect_fails_rule(factory, r#" + fragment sameAliasesWithDifferentFieldTargets on Dog { + fido: name + fido: nickname + } + "#, + &[ + RuleError::new( + &error_message("fido", &Message("name and nickname are different fields".to_owned())), &[ + SourcePosition::new(78, 2, 12), + SourcePosition::new(101, 3, 12), + ]), + ]); + } + + #[test] + fn same_aliases_allowed_on_nonoverlapping_fields() { + expect_passes_rule(factory, r#" + fragment sameAliasesWithDifferentFieldTargets on Pet { + ... on Dog { + name + } + ... on Cat { + name: nickname + } + } + "#); + } + + #[test] + fn alias_masking_direct_field_access() { + expect_fails_rule(factory, r#" + fragment aliasMaskingDirectFieldAccess on Dog { + name: nickname + name + } + "#, + &[ + RuleError::new( + &error_message("name", &Message("nickname and name are different fields".to_owned())), &[ + SourcePosition::new(71, 2, 12), + SourcePosition::new(98, 3, 12), + ]), + ]); + } + + #[test] + fn different_args_second_adds_an_argument() { + expect_fails_rule(factory, r#" + fragment conflictingArgs on Dog { + doesKnowCommand + doesKnowCommand(dogCommand: HEEL) + } + "#, + &[ + RuleError::new( + &error_message("doesKnowCommand", &Message("they have differing arguments".to_owned())), &[ + SourcePosition::new(57, 2, 12), + SourcePosition::new(85, 3, 12), + ]), + ]); + } + + #[test] + fn different_args_second_missing_an_argument() { + expect_fails_rule(factory, r#" + fragment conflictingArgs on Dog { + doesKnowCommand(dogCommand: SIT) + doesKnowCommand + } + "#, + &[ + RuleError::new( + &error_message("doesKnowCommand", &Message("they have differing arguments".to_owned())), &[ + SourcePosition::new(57, 2, 12), + SourcePosition::new(102, 3, 12), + ]), + ]); + } + + #[test] + fn conflicting_args() { + expect_fails_rule(factory, r#" + fragment conflictingArgs on Dog { + doesKnowCommand(dogCommand: SIT) + doesKnowCommand(dogCommand: HEEL) + } + "#, + &[ + RuleError::new( + &error_message("doesKnowCommand", &Message("they have differing arguments".to_owned())), &[ + SourcePosition::new(57, 2, 12), + SourcePosition::new(102, 3, 12), + ]), + ]); + } + + #[test] + fn allows_different_args_where_no_conflict_is_possible() { + expect_passes_rule(factory, r#" + fragment conflictingArgs on Pet { + ... on Dog { + name(surname: true) + } + ... 
on Cat { + name + } + } + "#); + } + + #[test] + fn encounters_conflict_in_fragments() { + expect_fails_rule(factory, r#" + { + ...A + ...B + } + fragment A on Type { + x: a + } + fragment B on Type { + x: b + } + "#, + &[ + RuleError::new( + &error_message("x", &Message("a and b are different fields".to_owned())), &[ + SourcePosition::new(102, 6, 12), + SourcePosition::new(162, 9, 12), + ]), + ]); + } + + #[test] + fn reports_each_conflict_once() { + expect_fails_rule(factory, r#" + { + f1 { + ...A + ...B + } + f2 { + ...B + ...A + } + f3 { + ...A + ...B + x: c + } + } + fragment A on Type { + x: a + } + fragment B on Type { + x: b + } + "#, + &[ + RuleError::new( + &error_message("x", &Message("c and a are different fields".to_owned())), &[ + SourcePosition::new(220, 13, 14), + SourcePosition::new(294, 17, 12), + ]), + RuleError::new( + &error_message("x", &Message("c and b are different fields".to_owned())), &[ + SourcePosition::new(220, 13, 14), + SourcePosition::new(354, 20, 12), + ]), + RuleError::new( + &error_message("x", &Message("a and b are different fields".to_owned())), &[ + SourcePosition::new(294, 17, 12), + SourcePosition::new(354, 20, 12), + ]), + ]); + } + + #[test] + fn deep_conflict() { + expect_fails_rule(factory, r#" + { + field { + x: a + }, + field { + x: b + } + } + "#, + &[ + RuleError::new( + &error_message("field", + &Nested(vec![ + ConflictReason( + "x".to_owned(), + Message("a and b are different fields".to_owned()) + ), + ])), &[ + SourcePosition::new(25, 2, 12), + SourcePosition::new(47, 3, 14), + SourcePosition::new(79, 5, 12), + SourcePosition::new(101, 6, 14), + ]), + ]); + } + + #[test] + fn deep_conflict_with_multiple_issues() { + expect_fails_rule(factory, r#" + { + field { + x: a + y: c + }, + field { + x: b + y: d + } + } + "#, + &[ + RuleError::new( + &error_message("field", + &Nested(vec![ + ConflictReason( + "x".to_owned(), + Message("a and b are different fields".to_owned()) + ), + ConflictReason( + "y".to_owned(), + Message("c and d are different fields".to_owned()) + ), + ])), &[ + SourcePosition::new(25, 2, 12), + SourcePosition::new(47, 3, 14), + SourcePosition::new(66, 4, 14), + SourcePosition::new(98, 6, 12), + SourcePosition::new(120, 7, 14), + SourcePosition::new(139, 8, 14), + ]), + ]); + } + + #[test] + fn very_deep_conflict() { + expect_fails_rule(factory, r#" + { + field { + deepField { + x: a + } + }, + field { + deepField { + x: b + } + } + } + "#, + &[ + RuleError::new( + &error_message("field", + &Nested(vec![ + ConflictReason( + "deepField".to_owned(), + Nested(vec![ + ConflictReason( + "x".to_owned(), + Message("a and b are different fields".to_owned()) + ) + ]) + ), + ])), &[ + SourcePosition::new(25, 2, 12), + SourcePosition::new(47, 3, 14), + SourcePosition::new(75, 4, 16), + SourcePosition::new(123, 7, 12), + SourcePosition::new(145, 8, 14), + SourcePosition::new(173, 9, 16), + ]), + ]); + } + + #[test] + fn reports_deep_conflict_to_nearest_common_ancestor() { + expect_fails_rule(factory, r#" + { + field { + deepField { + x: a + } + deepField { + x: b + } + }, + field { + deepField { + y + } + } + } + "#, + &[ + RuleError::new( + &error_message("deepField", + &Nested(vec![ + ConflictReason( + "x".to_owned(), + Message("a and b are different fields".to_owned()) + ) + ])), &[ + SourcePosition::new(47, 3, 14), + SourcePosition::new(75, 4, 16), + SourcePosition::new(110, 6, 14), + SourcePosition::new(138, 7, 16), + ]), + ]); + } + + #[test] + fn reports_deep_conflict_to_nearest_common_ancestor_in_fragments() { + 
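+ // The same fragment is spread from two sibling fields; the x conflict should
+ // be reported once, attributed to the conflicting deeperField selections
+ // inside the fragment (the nearest common ancestor), not to each spread site.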
expect_fails_rule(factory, r#" + { + field { + ...F + } + field { + ...F + } + } + fragment F on T { + deepField { + deeperField { + x: a + } + deeperField { + x: b + } + }, + deepField { + deeperField { + y + } + } + } + "#, + &[ + RuleError::new( + &error_message("deeperField", + &Nested(vec![ + ConflictReason( + "x".to_owned(), + Message("a and b are different fields".to_owned()) + ) + ])), &[ + SourcePosition::new(197, 11, 14), + SourcePosition::new(227, 12, 16), + SourcePosition::new(262, 14, 14), + SourcePosition::new(292, 15, 16), + ]), + ]); + } + + #[test] + fn reports_deep_conflict_in_nested_fragments() { + expect_fails_rule(factory, r#" + { + field { + ...F + } + field { + ...I + } + } + fragment F on T { + x: a + ...G + } + fragment G on T { + y: c + } + fragment I on T { + y: d + ...J + } + fragment J on T { + x: b + } + "#, + &[ + RuleError::new( + &error_message("field", + &Nested(vec![ + ConflictReason( + "x".to_owned(), + Message("a and b are different fields".to_owned()) + ), + ConflictReason( + "y".to_owned(), + Message("c and d are different fields".to_owned()) + ), + ])), &[ + SourcePosition::new(25, 2, 12), + SourcePosition::new(171, 10, 12), + SourcePosition::new(245, 14, 12), + SourcePosition::new(78, 5, 12), + SourcePosition::new(376, 21, 12), + SourcePosition::new(302, 17, 12), + ]), + ]); + } + + #[test] + fn ignores_unknown_fragments() { + expect_passes_rule(factory, r#" + { + field + ...Unknown + ...Known + } + + fragment Known on T { + field + ...OtherUnknown + } + "#); + } + + struct SomeBox; + struct StringBox; + struct IntBox; + struct NonNullStringBox1; + struct NonNullStringBox1Impl; + struct NonNullStringBox2; + struct NonNullStringBox2Impl; + struct Connection; + struct Edge; + struct Node; + struct QueryRoot; + + impl GraphQLType for SomeBox { + fn name() -> Option<&'static str> { + Some("SomeBox") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_interface_type::()(&[ + registry.field::>("deepBox"), + registry.field::>("unrelatedField"), + ]) + .into_meta() + } + } + + impl GraphQLType for StringBox { + fn name() -> Option<&'static str> { + Some("StringBox") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("scalar"), + registry.field::>("deepBox"), + registry.field::>("unrelatedField"), + registry.field::>>>("listStringBox"), + registry.field::>("stringBox"), + registry.field::>("intBox"), + ]) + .interfaces(&[ + registry.get_type::(), + ]) + .into_meta() + } + } + + impl GraphQLType for IntBox { + fn name() -> Option<&'static str> { + Some("IntBox") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("scalar"), + registry.field::>("deepBox"), + registry.field::>("unrelatedField"), + registry.field::>>>("listStringBox"), + registry.field::>("stringBox"), + registry.field::>("intBox"), + ]) + .interfaces(&[ + registry.get_type::(), + ]) + .into_meta() + } + } + + impl GraphQLType for NonNullStringBox1 { + fn name() -> Option<&'static str> { + Some("NonNullStringBox1") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_interface_type::()(&[ + registry.field::("scalar"), + ]) + .into_meta() + } + } + + impl GraphQLType for NonNullStringBox1Impl { + fn name() -> Option<&'static str> { + Some("NonNullStringBox1Impl") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::("scalar"), + registry.field::>("deepBox"), + 
registry.field::>("unrelatedField"), + ]) + .interfaces(&[ + registry.get_type::(), + registry.get_type::(), + ]) + .into_meta() + } + } + + impl GraphQLType for NonNullStringBox2 { + fn name() -> Option<&'static str> { + Some("NonNullStringBox2") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_interface_type::()(&[ + registry.field::("scalar"), + ]) + .into_meta() + } + } + + impl GraphQLType for NonNullStringBox2Impl { + fn name() -> Option<&'static str> { + Some("NonNullStringBox2Impl") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::("scalar"), + registry.field::>("deepBox"), + registry.field::>("unrelatedField"), + ]) + .interfaces(&[ + registry.get_type::(), + registry.get_type::(), + ]) + .into_meta() + } + } + + impl GraphQLType for Node { + fn name() -> Option<&'static str> { + Some("Node") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("id"), + registry.field::>("name"), + ]) + .into_meta() + } + } + + impl GraphQLType for Edge { + fn name() -> Option<&'static str> { + Some("Edge") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("node"), + ]) + .into_meta() + } + } + + impl GraphQLType for Connection { + fn name() -> Option<&'static str> { + Some("Connection") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>>>("edges"), + ]) + .into_meta() + } + } + + impl GraphQLType for QueryRoot { + fn name() -> Option<&'static str> { + Some("QueryRoot") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.get_type::(); + registry.get_type::(); + registry.get_type::(); + registry.get_type::(); + + registry.build_object_type::()(&[ + registry.field::>("someBox"), + registry.field::>("connection"), + ]) + .into_meta() + } + } + + #[test] + fn conflicting_return_types_which_potentially_overlap() { + expect_fails_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ...on IntBox { + scalar + } + ...on NonNullStringBox1 { + scalar + } + } + } + "#, + &[ + RuleError::new( + &error_message( + "scalar", + &Message("they return conflicting types Int and String!".to_owned())), &[ + SourcePosition::new(88, 4, 18), + SourcePosition::new(173, 7, 18), + ]), + ]); + } + + #[test] + fn compatible_return_shapes_on_different_return_types() { + expect_passes_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ... on SomeBox { + deepBox { + unrelatedField + } + } + ... on StringBox { + deepBox { + unrelatedField + } + } + } + } + "#); + } + + #[test] + fn disallows_differing_return_types_despite_no_overlap() { + expect_fails_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ... on IntBox { + scalar + } + ... on StringBox { + scalar + } + } + } + "#, + &[ + RuleError::new( + &error_message( + "scalar", + &Message("they return conflicting types Int and String".to_owned())), &[ + SourcePosition::new(89, 4, 18), + SourcePosition::new(167, 7, 18), + ]), + ]); + } + + #[test] + fn reports_correctly_when_a_non_exclusive_follows_an_exclusive() { + expect_fails_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ... on IntBox { + deepBox { + ...X + } + } + } + someBox { + ... on StringBox { + deepBox { + ...Y + } + } + } + memoed: someBox { + ... on IntBox { + deepBox { + ...X + } + } + } + memoed: someBox { + ... 
on StringBox { + deepBox { + ...Y + } + } + } + other: someBox { + ...X + } + other: someBox { + ...Y + } + } + fragment X on SomeBox { + scalar + } + fragment Y on SomeBox { + scalar: unrelatedField + } + "#, + &[ + RuleError::new( + &error_message( + "other", + &Nested(vec![ + ConflictReason( + "scalar".to_owned(), + Message("scalar and unrelatedField are different fields".to_owned()) + ), + ])), &[ + SourcePosition::new(703, 30, 14), + SourcePosition::new(889, 38, 14), + SourcePosition::new(771, 33, 14), + SourcePosition::new(960, 41, 14), + ]), + ]); + } + + #[test] + fn disallows_differing_return_type_nullability_despite_no_overlap() { + expect_fails_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ... on NonNullStringBox1 { + scalar + } + ... on StringBox { + scalar + } + } + } + "#, + &[ + RuleError::new( + &error_message( + "scalar", + &Message("they return conflicting types String! and String".to_owned())), &[ + SourcePosition::new(100, 4, 18), + SourcePosition::new(178, 7, 18), + ]), + ]); + } + + #[test] + fn disallows_differing_return_type_list_despite_no_overlap() { + expect_fails_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ... on IntBox { + box: listStringBox { + scalar + } + } + ... on StringBox { + box: stringBox { + scalar + } + } + } + } + "#, + &[ + RuleError::new( + &error_message( + "box", + &Message("they return conflicting types [StringBox] and StringBox".to_owned())), &[ + SourcePosition::new(89, 4, 18), + SourcePosition::new(228, 9, 18), + ]), + ]); + + expect_fails_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ... on IntBox { + box: stringBox { + scalar + } + } + ... on StringBox { + box: listStringBox { + scalar + } + } + } + } + "#, + &[ + RuleError::new( + &error_message( + "box", + &Message("they return conflicting types StringBox and [StringBox]".to_owned())), &[ + SourcePosition::new(89, 4, 18), + SourcePosition::new(224, 9, 18), + ]), + ]); + } + + #[test] + fn disallows_differing_subfields() { + expect_fails_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ... on IntBox { + box: stringBox { + val: scalar + val: unrelatedField + } + } + ... on StringBox { + box: stringBox { + val: scalar + } + } + } + } + "#, + &[ + RuleError::new( + &error_message( + "val", + &Message("scalar and unrelatedField are different fields".to_owned())), &[ + SourcePosition::new(126, 5, 20), + SourcePosition::new(158, 6, 20), + ]), + ]); + } + + #[test] + fn disallows_differing_deep_return_types_despite_no_overlap() { + expect_fails_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ... on IntBox { + box: stringBox { + scalar + } + } + ... on StringBox { + box: intBox { + scalar + } + } + } + } + "#, + &[ + RuleError::new( + &error_message( + "box", + &Nested(vec![ + ConflictReason( + "scalar".to_owned(), + Message("they return conflicting types String and Int".to_owned()) + ) + ])), &[ + SourcePosition::new(89, 4, 18), + SourcePosition::new(126, 5, 20), + SourcePosition::new(224, 9, 18), + SourcePosition::new(258, 10, 20), + ]), + ]); + } + + #[test] + fn allows_non_conflicting_overlapping_types() { + expect_passes_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ... on IntBox { + scalar: unrelatedField + } + ... 
on StringBox { + scalar + } + } + } + "#); + } + + #[test] + fn same_wrapped_scalar_return_types() { + expect_passes_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ...on NonNullStringBox1 { + scalar + } + ...on NonNullStringBox2 { + scalar + } + } + } + "#); + } + + #[test] + fn allows_inline_typeless_fragments() { + expect_passes_rule_with_schema(QueryRoot, factory, r#" + { + a + ... { + a + } + } + "#); + } + + #[test] + fn compares_deep_types_including_list() { + expect_fails_rule_with_schema(QueryRoot, factory, r#" + { + connection { + ...edgeID + edges { + node { + id: name + } + } + } + } + + fragment edgeID on Connection { + edges { + node { + id + } + } + } + "#, + &[ + RuleError::new( + &error_message( + "edges", + &Nested(vec![ + ConflictReason( + "node".to_owned(), + Nested(vec![ + ConflictReason( + "id".to_owned(), + Message("name and id are different fields".to_owned()) + ) + ]), + ) + ])), &[ + SourcePosition::new(84, 4, 16), + SourcePosition::new(110, 5, 18), + SourcePosition::new(137, 6, 20), + SourcePosition::new(273, 13, 14), + SourcePosition::new(297, 14, 16), + SourcePosition::new(322, 15, 18), + ]), + ]); + } + + #[test] + fn ignores_unknown_types() { + expect_passes_rule_with_schema(QueryRoot, factory, r#" + { + someBox { + ...on UnknownType { + scalar + } + ...on NonNullStringBox2 { + scalar + } + } + } + "#); + } + + #[test] + fn error_message_contains_hint_for_alias_conflict() { + assert_eq!( + &error_message( + "x", + &Message("a and b are different fields".to_owned())), + "Fields \"x\" conflict because a and b are different fields. Use \ + different aliases on the fields to fetch both if this \ + was intentional"); + } +} diff --git a/src/validation/rules/possible_fragment_spreads.rs b/src/validation/rules/possible_fragment_spreads.rs new file mode 100644 index 00000000..3389f273 --- /dev/null +++ b/src/validation/rules/possible_fragment_spreads.rs @@ -0,0 +1,314 @@ +use std::collections::HashMap; +use ast::{Document, Definition, InlineFragment, FragmentSpread}; +use validation::{ValidatorContext, Visitor}; +use parser::Spanning; +use schema::meta::MetaType; + +pub struct PossibleFragmentSpreads<'a> { + fragment_types: HashMap<&'a str, &'a MetaType>, +} + +pub fn factory<'a>() -> PossibleFragmentSpreads<'a> { + PossibleFragmentSpreads { + fragment_types: HashMap::new(), + } +} + +impl<'a> Visitor<'a> for PossibleFragmentSpreads<'a> { + fn enter_document(&mut self, ctx: &mut ValidatorContext<'a>, defs: &'a Document) { + for def in defs { + if let Definition::Fragment(Spanning { ref item, .. 
}) = *def { + if let Some(t) = ctx.schema.concrete_type_by_name(&item.type_condition.item) { + self.fragment_types.insert(&item.name.item, t); + } + } + } + } + + fn enter_inline_fragment(&mut self, ctx: &mut ValidatorContext<'a>, frag: &'a Spanning) { + if let (Some(ref parent_type), Some(ref frag_type)) + = (ctx.parent_type(), frag.item.type_condition.as_ref().and_then(|s| ctx.schema.concrete_type_by_name(&s.item))) + { + if !ctx.schema.type_overlap(parent_type, frag_type) { + ctx.report_error( + &error_message( + None, + parent_type.name().unwrap_or(""), + frag_type.name().unwrap_or("")), + &[frag.start.clone()]); + } + } + } + + fn enter_fragment_spread(&mut self, ctx: &mut ValidatorContext<'a>, spread: &'a Spanning) { + if let (Some(ref parent_type), Some(ref frag_type)) + = (ctx.parent_type(), self.fragment_types.get(spread.item.name.item.as_str())) + { + if !ctx.schema.type_overlap(parent_type, frag_type) { + ctx.report_error( + &error_message( + Some(&spread.item.name.item), + parent_type.name().unwrap_or(""), + frag_type.name().unwrap_or("")), + &[spread.start.clone()]); + } + } + } +} + +fn error_message(frag_name: Option<&str>, parent_type_name: &str, frag_type: &str) -> String { + if let Some(frag_name) = frag_name { + format!( + "Fragment \"{}\" cannot be spread here as objects of type \ + \"{}\" can never be of type \"{}\"", + frag_name, parent_type_name, frag_type) + } + else { + format!( + "Fragment cannot be spread here as objects of type \"{}\" \ + can never be of type \"{}\"", + parent_type_name, frag_type) + } +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn of_the_same_object() { + expect_passes_rule(factory, r#" + fragment objectWithinObject on Dog { ...dogFragment } + fragment dogFragment on Dog { barkVolume } + "#); + } + + #[test] + fn of_the_same_object_with_inline_fragment() { + expect_passes_rule(factory, r#" + fragment objectWithinObjectAnon on Dog { ... 
on Dog { barkVolume } } + "#); + } + + #[test] + fn object_into_an_implemented_interface() { + expect_passes_rule(factory, r#" + fragment objectWithinInterface on Pet { ...dogFragment } + fragment dogFragment on Dog { barkVolume } + "#); + } + + #[test] + fn object_into_containing_union() { + expect_passes_rule(factory, r#" + fragment objectWithinUnion on CatOrDog { ...dogFragment } + fragment dogFragment on Dog { barkVolume } + "#); + } + + #[test] + fn union_into_contained_object() { + expect_passes_rule(factory, r#" + fragment unionWithinObject on Dog { ...catOrDogFragment } + fragment catOrDogFragment on CatOrDog { __typename } + "#); + } + + #[test] + fn union_into_overlapping_interface() { + expect_passes_rule(factory, r#" + fragment unionWithinInterface on Pet { ...catOrDogFragment } + fragment catOrDogFragment on CatOrDog { __typename } + "#); + } + + #[test] + fn union_into_overlapping_union() { + expect_passes_rule(factory, r#" + fragment unionWithinUnion on DogOrHuman { ...catOrDogFragment } + fragment catOrDogFragment on CatOrDog { __typename } + "#); + } + + #[test] + fn interface_into_implemented_object() { + expect_passes_rule(factory, r#" + fragment interfaceWithinObject on Dog { ...petFragment } + fragment petFragment on Pet { name } + "#); + } + + #[test] + fn interface_into_overlapping_interface() { + expect_passes_rule(factory, r#" + fragment interfaceWithinInterface on Pet { ...beingFragment } + fragment beingFragment on Being { name } + "#); + } + + #[test] + fn interface_into_overlapping_interface_in_inline_fragment() { + expect_passes_rule(factory, r#" + fragment interfaceWithinInterface on Pet { ... on Being { name } } + "#); + } + + #[test] + fn interface_into_overlapping_union() { + expect_passes_rule(factory, r#" + fragment interfaceWithinUnion on CatOrDog { ...petFragment } + fragment petFragment on Pet { name } + "#); + } + + #[test] + fn different_object_into_object() { + expect_fails_rule(factory, r#" + fragment invalidObjectWithinObject on Cat { ...dogFragment } + fragment dogFragment on Dog { barkVolume } + "#, + &[ + RuleError::new(&error_message(Some("dogFragment"), "Cat", "Dog"), &[ + SourcePosition::new(55, 1, 54), + ]), + ]); + } + + #[test] + fn different_object_into_object_in_inline_fragment() { + expect_fails_rule(factory, r#" + fragment invalidObjectWithinObjectAnon on Cat { + ... 
on Dog { barkVolume } + } + "#, + &[ + RuleError::new(&error_message(None, "Cat", "Dog"), &[ + SourcePosition::new(71, 2, 12), + ]), + ]); + } + + #[test] + fn object_into_not_implementing_interface() { + expect_fails_rule(factory, r#" + fragment invalidObjectWithinInterface on Pet { ...humanFragment } + fragment humanFragment on Human { pets { name } } + "#, + &[ + RuleError::new(&error_message(Some("humanFragment"), "Pet", "Human"), &[ + SourcePosition::new(58, 1, 57), + ]), + ]); + } + + #[test] + fn object_into_not_containing_union() { + expect_fails_rule(factory, r#" + fragment invalidObjectWithinUnion on CatOrDog { ...humanFragment } + fragment humanFragment on Human { pets { name } } + "#, + &[ + RuleError::new(&error_message(Some("humanFragment"), "CatOrDog", "Human"), &[ + SourcePosition::new(59, 1, 58), + ]), + ]); + } + + #[test] + fn union_into_not_contained_object() { + expect_fails_rule(factory, r#" + fragment invalidUnionWithinObject on Human { ...catOrDogFragment } + fragment catOrDogFragment on CatOrDog { __typename } + "#, + &[ + RuleError::new(&error_message(Some("catOrDogFragment"), "Human", "CatOrDog"), &[ + SourcePosition::new(56, 1, 55), + ]), + ]); + } + + #[test] + fn union_into_non_overlapping_interface() { + expect_fails_rule(factory, r#" + fragment invalidUnionWithinInterface on Pet { ...humanOrAlienFragment } + fragment humanOrAlienFragment on HumanOrAlien { __typename } + "#, + &[ + RuleError::new(&error_message(Some("humanOrAlienFragment"), "Pet", "HumanOrAlien"), &[ + SourcePosition::new(57, 1, 56), + ]), + ]); + } + + #[test] + fn union_into_non_overlapping_union() { + expect_fails_rule(factory, r#" + fragment invalidUnionWithinUnion on CatOrDog { ...humanOrAlienFragment } + fragment humanOrAlienFragment on HumanOrAlien { __typename } + "#, + &[ + RuleError::new(&error_message(Some("humanOrAlienFragment"), "CatOrDog", "HumanOrAlien"), &[ + SourcePosition::new(58, 1, 57), + ]), + ]); + } + + #[test] + fn interface_into_non_implementing_object() { + expect_fails_rule(factory, r#" + fragment invalidInterfaceWithinObject on Cat { ...intelligentFragment } + fragment intelligentFragment on Intelligent { iq } + "#, + &[ + RuleError::new(&error_message(Some("intelligentFragment"), "Cat", "Intelligent"), &[ + SourcePosition::new(58, 1, 57), + ]), + ]); + } + + #[test] + fn interface_into_non_overlapping_interface() { + expect_fails_rule(factory, r#" + fragment invalidInterfaceWithinInterface on Pet { + ...intelligentFragment + } + fragment intelligentFragment on Intelligent { iq } + "#, + &[ + RuleError::new(&error_message(Some("intelligentFragment"), "Pet", "Intelligent"), &[ + SourcePosition::new(73, 2, 12), + ]), + ]); + } + + #[test] + fn interface_into_non_overlapping_interface_in_inline_fragment() { + expect_fails_rule(factory, r#" + fragment invalidInterfaceWithinInterfaceAnon on Pet { + ...on Intelligent { iq } + } + "#, + &[ + RuleError::new(&error_message(None, "Pet", "Intelligent"), &[ + SourcePosition::new(77, 2, 12), + ]), + ]); + } + + #[test] + fn interface_into_non_overlapping_union() { + expect_fails_rule(factory, r#" + fragment invalidInterfaceWithinUnion on HumanOrAlien { ...petFragment } + fragment petFragment on Pet { name } + "#, + &[ + RuleError::new(&error_message(Some("petFragment"), "HumanOrAlien", "Pet"), &[ + SourcePosition::new(66, 1, 65), + ]), + ]); + } + +} diff --git a/src/validation/rules/provided_non_null_arguments.rs b/src/validation/rules/provided_non_null_arguments.rs new file mode 100644 index 00000000..b79d9082 --- /dev/null 
+++ b/src/validation/rules/provided_non_null_arguments.rs @@ -0,0 +1,280 @@ +use ast::{Field, Directive}; +use validation::{ValidatorContext, Visitor}; +use parser::Spanning; +use schema::meta::{Field as FieldType}; +use schema::model::DirectiveType; + +pub struct ProvidedNonNullArguments { +} + +pub fn factory() -> ProvidedNonNullArguments { + ProvidedNonNullArguments {} +} + +impl<'a> Visitor<'a> for ProvidedNonNullArguments { + fn enter_field(&mut self, ctx: &mut ValidatorContext<'a>, field: &'a Spanning) { + let field_name = &field.item.name.item; + + if let Some(&FieldType { arguments: Some(ref meta_args), ..}) = ctx.parent_type().and_then(|t| t.field_by_name(field_name)) { + for meta_arg in meta_args { + if meta_arg.arg_type.is_non_null() + && field.item.arguments.as_ref().and_then(|args| args.item.get(&meta_arg.name)).is_none() + { + ctx.report_error( + &field_error_message(field_name, &meta_arg.name, &format!("{}", meta_arg.arg_type)), + &[field.start.clone()]); + } + } + } + } + + fn enter_directive(&mut self, ctx: &mut ValidatorContext<'a>, directive: &'a Spanning) { + let directive_name = &directive.item.name.item; + + if let Some(&DirectiveType { arguments: ref meta_args, ..}) = ctx.schema.directive_by_name(directive_name) { + for meta_arg in meta_args { + if meta_arg.arg_type.is_non_null() + && directive.item.arguments.as_ref().and_then(|args| args.item.get(&meta_arg.name)).is_none() + { + ctx.report_error( + &directive_error_message(directive_name, &meta_arg.name, &format!("{}", meta_arg.arg_type)), + &[directive.start.clone()]); + } + } + } + } +} + +fn field_error_message(field_name: &str, arg_name: &str, type_name: &str) -> String { + format!( + r#"Field "{}" argument "{}" of type "{}" is required but not provided"#, + field_name, arg_name, type_name) +} + +fn directive_error_message(directive_name: &str, arg_name: &str, type_name: &str) -> String { + format!( + r#"Directive "@{}" argument "{}" of type "{}" is required but not provided"#, + directive_name, arg_name, type_name) +} + +#[cfg(test)] +mod tests { + use super::{field_error_message, directive_error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn ignores_unknown_arguments() { + expect_passes_rule(factory, r#" + { + dog { + isHousetrained(unknownArgument: true) + } + } + "#); + } + + #[test] + fn arg_on_optional_arg() { + expect_passes_rule(factory, r#" + { + dog { + isHousetrained(atOtherHomes: true) + } + } + "#); + } + + #[test] + fn no_arg_on_optional_arg() { + expect_passes_rule(factory, r#" + { + dog { + isHousetrained + } + } + "#); + } + + #[test] + fn multiple_args() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleReqs(req1: 1, req2: 2) + } + } + "#); + } + + #[test] + fn multiple_args_reverse_order() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleReqs(req2: 2, req1: 1) + } + } + "#); + } + + #[test] + fn no_args_on_multiple_optional() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOpts + } + } + "#); + } + + #[test] + fn one_arg_on_multiple_optional() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOpts(opt1: 1) + } + } + "#); + } + + #[test] + fn second_arg_on_multiple_optional() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOpts(opt2: 1) + } + } + "#); + } + + #[test] + fn muliple_reqs_on_mixed_list() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4) + } + 
} + "#); + } + + #[test] + fn multiple_reqs_and_one_opt_on_mixed_list() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4, opt1: 5) + } + } + "#); + } + + #[test] + fn all_reqs_on_opts_on_mixed_list() { + expect_passes_rule(factory, r#" + { + complicatedArgs { + multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6) + } + } + "#); + } + + #[test] + fn missing_one_non_nullable_argument() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + multipleReqs(req2: 2) + } + } + "#, + &[ + RuleError::new(&field_error_message("multipleReqs", "req1", "Int!"), &[ + SourcePosition::new(63, 3, 16), + ]), + ]); + } + + #[test] + fn missing_multiple_non_nullable_arguments() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + multipleReqs + } + } + "#, + &[ + RuleError::new(&field_error_message("multipleReqs", "req1", "Int!"), &[ + SourcePosition::new(63, 3, 16), + ]), + RuleError::new(&field_error_message("multipleReqs", "req2", "Int!"), &[ + SourcePosition::new(63, 3, 16), + ]), + ]); + } + + #[test] + fn incorrect_value_and_missing_argument() { + expect_fails_rule(factory, r#" + { + complicatedArgs { + multipleReqs(req1: "one") + } + } + "#, + &[ + RuleError::new(&field_error_message("multipleReqs", "req2", "Int!"), &[ + SourcePosition::new(63, 3, 16), + ]), + ]); + } + + #[test] + fn ignores_unknown_directives() { + expect_passes_rule(factory, r#" + { + dog @unknown + } + "#); + } + + #[test] + fn with_directives_of_valid_types() { + expect_passes_rule(factory, r#" + { + dog @include(if: true) { + name + } + human @skip(if: false) { + name + } + } + "#); + } + + #[test] + fn with_directive_with_missing_types() { + expect_fails_rule(factory, r#" + { + dog @include { + name @skip + } + } + "#, + &[ + RuleError::new(&directive_error_message("include", "if", "Boolean!"), &[ + SourcePosition::new(33, 2, 18), + ]), + RuleError::new(&directive_error_message("skip", "if", "Boolean!"), &[ + SourcePosition::new(65, 3, 21), + ]), + ]); + } +} diff --git a/src/validation/rules/scalar_leafs.rs b/src/validation/rules/scalar_leafs.rs new file mode 100644 index 00000000..dfd2f527 --- /dev/null +++ b/src/validation/rules/scalar_leafs.rs @@ -0,0 +1,168 @@ +use ast::Field; +use validation::{ValidatorContext, Visitor, RuleError}; +use parser::Spanning; + +pub struct ScalarLeafs {} + +pub fn factory() -> ScalarLeafs { + ScalarLeafs {} +} + +impl<'a> Visitor<'a> for ScalarLeafs { + fn enter_field(&mut self, ctx: &mut ValidatorContext<'a>, field: &'a Spanning) { + let field_name = &field.item.name.item; + + let error = if let (Some(field_type), Some(field_type_literal)) = (ctx.current_type(), ctx.current_type_literal()) { + match (field_type.is_leaf(), &field.item.selection_set) { + (true, &Some(_)) => Some(RuleError::new( + &no_allowed_error_message(field_name, &format!("{}", field_type_literal)), + &[field.start.clone()])), + (false, &None) => Some(RuleError::new( + &required_error_message(field_name, &format!("{}", field_type_literal)), + &[field.start.clone()])), + _ => None, + } + } else { None }; + + if let Some(error) = error { + ctx.append_errors(vec![error]); + } + } +} + +fn no_allowed_error_message(field_name: &str, type_name: &str) -> String { + format!( + r#"Field "{}" must not have a selection since type {} has no subfields"#, + field_name, type_name) +} + +fn required_error_message(field_name: &str, type_name: &str) -> String { + format!( + r#"Field "{}" of type "{}" must have a selection of subfields. Did you mean "{} {{ ... 
}}"?"#, + field_name, type_name, field_name) +} + +#[cfg(test)] +mod tests { + use super::{no_allowed_error_message, required_error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn valid_scalar_selection() { + expect_passes_rule(factory, r#" + fragment scalarSelection on Dog { + barks + } + "#); + } + + #[test] + fn object_type_missing_selection() { + expect_fails_rule(factory, r#" + query directQueryOnObjectWithoutSubFields { + human + } + "#, + &[ + RuleError::new(&required_error_message("human", "Human"), &[ + SourcePosition::new(67, 2, 12), + ]), + ]); + } + + #[test] + fn interface_type_missing_selection() { + expect_fails_rule(factory, r#" + { + human { pets } + } + "#, + &[ + RuleError::new(&required_error_message("pets", "[Pet]"), &[ + SourcePosition::new(33, 2, 20), + ]), + ]); + } + + #[test] + fn valid_scalar_selection_with_args() { + expect_passes_rule(factory, r#" + fragment scalarSelectionWithArgs on Dog { + doesKnowCommand(dogCommand: SIT) + } + "#); + } + + #[test] + fn scalar_selection_not_allowed_on_boolean() { + expect_fails_rule(factory, r#" + fragment scalarSelectionsNotAllowedOnBoolean on Dog { + barks { sinceWhen } + } + "#, + &[ + RuleError::new(&no_allowed_error_message("barks", "Boolean"), &[ + SourcePosition::new(77, 2, 12), + ]), + ]); + } + + #[test] + fn scalar_selection_not_allowed_on_enum() { + expect_fails_rule(factory, r#" + fragment scalarSelectionsNotAllowedOnEnum on Cat { + furColor { inHexdec } + } + "#, + &[ + RuleError::new(&no_allowed_error_message("furColor", "FurColor"), &[ + SourcePosition::new(74, 2, 12), + ]), + ]); + } + + #[test] + fn scalar_selection_not_allowed_with_args() { + expect_fails_rule(factory, r#" + fragment scalarSelectionsNotAllowedWithArgs on Dog { + doesKnowCommand(dogCommand: SIT) { sinceWhen } + } + "#, + &[ + RuleError::new(&no_allowed_error_message("doesKnowCommand", "Boolean"), &[ + SourcePosition::new(76, 2, 12), + ]), + ]); + } + + #[test] + fn scalar_selection_not_allowed_with_directives() { + expect_fails_rule(factory, r#" + fragment scalarSelectionsNotAllowedWithDirectives on Dog { + name @include(if: true) { isAlsoHumanName } + } + "#, + &[ + RuleError::new(&no_allowed_error_message("name", "String"), &[ + SourcePosition::new(82, 2, 12), + ]), + ]); + } + + #[test] + fn scalar_selection_not_allowed_with_directives_and_args() { + expect_fails_rule(factory, r#" + fragment scalarSelectionsNotAllowedWithDirectivesAndArgs on Dog { + doesKnowCommand(dogCommand: SIT) @include(if: true) { sinceWhen } + } + "#, + &[ + RuleError::new(&no_allowed_error_message("doesKnowCommand", "Boolean"), &[ + SourcePosition::new(89, 2, 12), + ]), + ]); + } + +} diff --git a/src/validation/rules/unique_argument_names.rs b/src/validation/rules/unique_argument_names.rs new file mode 100644 index 00000000..99abb946 --- /dev/null +++ b/src/validation/rules/unique_argument_names.rs @@ -0,0 +1,201 @@ +use std::collections::hash_map::{HashMap, Entry}; + +use ast::{Directive, Field, InputValue}; +use validation::{ValidatorContext, Visitor}; +use parser::{SourcePosition, Spanning}; + +pub struct UniqueArgumentNames<'a> { + known_names: HashMap<&'a str, SourcePosition>, +} + +pub fn factory<'a>() -> UniqueArgumentNames<'a> { + UniqueArgumentNames { + known_names: HashMap::new(), + } +} + +impl<'a> Visitor<'a> for UniqueArgumentNames<'a> { + fn enter_directive(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.known_names = HashMap::new(); + } + + 
fn enter_field(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.known_names = HashMap::new(); + } + + fn enter_argument(&mut self, ctx: &mut ValidatorContext<'a>, &(ref arg_name, _): &'a (Spanning, Spanning)) { + match self.known_names.entry(&arg_name.item) { + Entry::Occupied(e) => { + ctx.report_error( + &error_message(&arg_name.item), + &[e.get().clone(), arg_name.start.clone()]); + } + Entry::Vacant(e) => { + e.insert(arg_name.start.clone()); + } + } + } +} + +fn error_message(arg_name: &str) -> String { + format!("There can only be one argument named \"{}\"", arg_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn no_arguments_on_field() { + expect_passes_rule(factory, r#" + { + field + } + "#); + } + + #[test] + fn no_arguments_on_directive() { + expect_passes_rule(factory, r#" + { + field @directive + } + "#); + } + + #[test] + fn argument_on_field() { + expect_passes_rule(factory, r#" + { + field(arg: "value") + } + "#); + } + + #[test] + fn argument_on_directive() { + expect_passes_rule(factory, r#" + { + field @directive(arg: "value") + } + "#); + } + + #[test] + fn same_argument_on_two_fields() { + expect_passes_rule(factory, r#" + { + one: field(arg: "value") + two: field(arg: "value") + } + "#); + } + + #[test] + fn same_argument_on_field_and_directive() { + expect_passes_rule(factory, r#" + { + field(arg: "value") @directive(arg: "value") + } + "#); + } + + #[test] + fn same_argument_on_two_directives() { + expect_passes_rule(factory, r#" + { + field @directive1(arg: "value") @directive2(arg: "value") + } + "#); + } + + #[test] + fn multiple_field_arguments() { + expect_passes_rule(factory, r#" + { + field(arg1: "value", arg2: "value", arg3: "value") + } + "#); + } + + #[test] + fn multiple_directive_arguments() { + expect_passes_rule(factory, r#" + { + field @directive(arg1: "value", arg2: "value", arg3: "value") + } + "#); + } + + #[test] + fn duplicate_field_arguments() { + expect_fails_rule(factory, r#" + { + field(arg1: "value", arg1: "value") + } + "#, + &[ + RuleError::new(&error_message("arg1"), &[ + SourcePosition::new(31, 2, 18), + SourcePosition::new(46, 2, 33), + ]), + ]); + } + + #[test] + fn many_duplicate_field_arguments() { + expect_fails_rule(factory, r#" + { + field(arg1: "value", arg1: "value", arg1: "value") + } + "#, + &[ + RuleError::new(&error_message("arg1"), &[ + SourcePosition::new(31, 2, 18), + SourcePosition::new(46, 2, 33), + ]), + RuleError::new(&error_message("arg1"), &[ + SourcePosition::new(31, 2, 18), + SourcePosition::new(61, 2, 48), + ]), + ]); + } + + #[test] + fn duplicate_directive_arguments() { + expect_fails_rule(factory, r#" + { + field @directive(arg1: "value", arg1: "value") + } + "#, + &[ + RuleError::new(&error_message("arg1"), &[ + SourcePosition::new(42, 2, 29), + SourcePosition::new(57, 2, 44), + ]), + ]); + } + + #[test] + fn many_duplicate_directive_arguments() { + expect_fails_rule(factory, r#" + { + field @directive(arg1: "value", arg1: "value", arg1: "value") + } + "#, + &[ + RuleError::new(&error_message("arg1"), &[ + SourcePosition::new(42, 2, 29), + SourcePosition::new(57, 2, 44), + ]), + RuleError::new(&error_message("arg1"), &[ + SourcePosition::new(42, 2, 29), + SourcePosition::new(72, 2, 59), + ]), + ]); + } + +} diff --git a/src/validation/rules/unique_fragment_names.rs b/src/validation/rules/unique_fragment_names.rs new file mode 100644 index 
00000000..e14c8135 --- /dev/null +++ b/src/validation/rules/unique_fragment_names.rs @@ -0,0 +1,149 @@ +use std::collections::hash_map::{HashMap, Entry}; + +use ast::Fragment; +use parser::{SourcePosition, Spanning}; +use validation::{ValidatorContext, Visitor}; + +pub struct UniqueFragmentNames<'a> { + names: HashMap<&'a str, SourcePosition>, +} + +pub fn factory<'a>() -> UniqueFragmentNames<'a> { + UniqueFragmentNames { + names: HashMap::new(), + } +} + +impl<'a> Visitor<'a> for UniqueFragmentNames<'a> { + fn enter_fragment_definition(&mut self, context: &mut ValidatorContext<'a>, f: &'a Spanning) { + match self.names.entry(&f.item.name.item) { + Entry::Occupied(e) => { + context.report_error( + &duplicate_message(&f.item.name.item), + &[e.get().clone(), f.item.name.start.clone()]); + } + Entry::Vacant(e) => { + e.insert(f.item.name.start.clone()); + } + } + } +} + +fn duplicate_message(frag_name: &str) -> String { + format!("There can only be one fragment named {}", frag_name) +} + +#[cfg(test)] +mod tests { + use super::{duplicate_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn no_fragments() { + expect_passes_rule(factory, r#" + { + field + } + "#); + } + + #[test] + fn one_fragment() { + expect_passes_rule(factory, r#" + { + ...fragA + } + + fragment fragA on Type { + field + } + "#); + } + + #[test] + fn many_fragments() { + expect_passes_rule(factory, r#" + { + ...fragA + ...fragB + ...fragC + } + fragment fragA on Type { + fieldA + } + fragment fragB on Type { + fieldB + } + fragment fragC on Type { + fieldC + } + "#); + } + + #[test] + fn inline_fragments_always_unique() { + expect_passes_rule(factory, r#" + { + ...on Type { + fieldA + } + ...on Type { + fieldB + } + } + "#); + } + + #[test] + fn fragment_and_operation_named_the_same() { + expect_passes_rule(factory, r#" + query Foo { + ...Foo + } + fragment Foo on Type { + field + } + "#); + } + + #[test] + fn fragments_named_the_same() { + expect_fails_rule(factory, r#" + { + ...fragA + } + fragment fragA on Type { + fieldA + } + fragment fragA on Type { + fieldB + } + "#, + &[ + RuleError::new(&duplicate_message("fragA"), &[ + SourcePosition::new(65, 4, 19), + SourcePosition::new(131, 7, 19) + ]), + ]); + } + + #[test] + fn fragments_named_the_same_no_reference() { + expect_fails_rule(factory, r#" + fragment fragA on Type { + fieldA + } + fragment fragA on Type { + fieldB + } + "#, + &[ + RuleError::new(&duplicate_message("fragA"), &[ + SourcePosition::new(20, 1, 19), + SourcePosition::new(86, 4, 19) + ]), + ]); + } +} diff --git a/src/validation/rules/unique_input_field_names.rs b/src/validation/rules/unique_input_field_names.rs new file mode 100644 index 00000000..da407a6c --- /dev/null +++ b/src/validation/rules/unique_input_field_names.rs @@ -0,0 +1,131 @@ +use std::collections::hash_map::{HashMap, Entry}; + +use ast::InputValue; +use validation::{ValidatorContext, Visitor}; +use parser::{SourcePosition, Spanning}; + +pub struct UniqueInputFieldNames<'a> { + known_name_stack: Vec>, +} + +pub fn factory<'a>() -> UniqueInputFieldNames<'a> { + UniqueInputFieldNames { + known_name_stack: Vec::new(), + } +} + +impl<'a> Visitor<'a> for UniqueInputFieldNames<'a> { + fn enter_object_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a Vec<(Spanning, Spanning)>>) { + self.known_name_stack.push(HashMap::new()); + } + + fn exit_object_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a Vec<(Spanning, Spanning)>>) { + 
self.known_name_stack.pop(); + } + + fn enter_object_field(&mut self, ctx: &mut ValidatorContext<'a>, &(ref field_name, _): &'a (Spanning, Spanning)) { + if let Some(ref mut known_names) = self.known_name_stack.last_mut() { + match known_names.entry(&field_name.item) { + Entry::Occupied(e) => { + ctx.report_error( + &error_message(&field_name.item), + &[e.get().clone(), field_name.start.clone()]); + } + Entry::Vacant(e) => { + e.insert(field_name.start.clone()); + } + } + } + } +} + +fn error_message(field_name: &str) -> String { + format!("There can only be one input field named \"{}\"", field_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn input_object_with_fields() { + expect_passes_rule(factory, r#" + { + field(arg: { f: true }) + } + "#); + } + + #[test] + fn same_input_object_within_two_args() { + expect_passes_rule(factory, r#" + { + field(arg1: { f: true }, arg2: { f: true }) + } + "#); + } + + #[test] + fn multiple_input_object_fields() { + expect_passes_rule(factory, r#" + { + field(arg: { f1: "value", f2: "value", f3: "value" }) + } + "#); + } + + #[test] + fn allows_for_nested_input_objects_with_similar_fields() { + expect_passes_rule(factory, r#" + { + field(arg: { + deep: { + deep: { + id: 1 + } + id: 1 + } + id: 1 + }) + } + "#); + } + + #[test] + fn duplicate_input_object_fields() { + expect_fails_rule(factory, r#" + { + field(arg: { f1: "value", f1: "value" }) + } + "#, + &[ + RuleError::new(&error_message("f1"), &[ + SourcePosition::new(38, 2, 25), + SourcePosition::new(51, 2, 38), + ]), + ]); + } + + #[test] + fn many_duplicate_input_object_fields() { + expect_fails_rule(factory, r#" + { + field(arg: { f1: "value", f1: "value", f1: "value" }) + } + "#, + &[ + RuleError::new(&error_message("f1"), &[ + SourcePosition::new(38, 2, 25), + SourcePosition::new(51, 2, 38), + ]), + RuleError::new(&error_message("f1"), &[ + SourcePosition::new(38, 2, 25), + SourcePosition::new(64, 2, 51), + ]), + ]); + } + +} diff --git a/src/validation/rules/unique_operation_names.rs b/src/validation/rules/unique_operation_names.rs new file mode 100644 index 00000000..b0e06717 --- /dev/null +++ b/src/validation/rules/unique_operation_names.rs @@ -0,0 +1,145 @@ +use std::collections::hash_map::{HashMap, Entry}; + +use ast::Operation; +use parser::{SourcePosition, Spanning}; +use validation::{ValidatorContext, Visitor}; + +pub struct UniqueOperationNames<'a> { + names: HashMap<&'a str, SourcePosition>, +} + +pub fn factory<'a>() -> UniqueOperationNames<'a> { + UniqueOperationNames { + names: HashMap::new(), + } +} + +impl<'a> Visitor<'a> for UniqueOperationNames<'a> { + fn enter_operation_definition(&mut self, ctx: &mut ValidatorContext<'a>, op: &'a Spanning) { + if let &Some(ref op_name) = &op.item.name { + match self.names.entry(&op_name.item) { + Entry::Occupied(e) => { + ctx.report_error( + &error_message(&op_name.item), + &[e.get().clone(), op.start.clone()]); + } + Entry::Vacant(e) => { + e.insert(op.start.clone()); + } + } + } + } +} + +fn error_message(op_name: &str) -> String { + format!("There can only be one operation named {}", op_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn no_operations() { + expect_passes_rule(factory, r#" + fragment fragA on Type { + field + } + "#); + } + + #[test] + fn 
one_anon_operation() { + expect_passes_rule(factory, r#" + { + field + } + "#); + } + + #[test] + fn one_named_operation() { + expect_passes_rule(factory, r#" + query Foo { + field + } + "#); + } + + #[test] + fn multiple_operations() { + expect_passes_rule(factory, r#" + query Foo { + field + } + + query Bar { + field + } + "#); + } + + #[test] + fn multiple_operations_of_different_types() { + expect_passes_rule(factory, r#" + query Foo { + field + } + + mutation Bar { + field + } + "#); + } + + #[test] + fn fragment_and_operation_named_the_same() { + expect_passes_rule(factory, r#" + query Foo { + ...Foo + } + fragment Foo on Type { + field + } + "#); + } + + #[test] + fn multiple_operations_of_same_name() { + expect_fails_rule(factory, r#" + query Foo { + fieldA + } + query Foo { + fieldB + } + "#, + &[ + RuleError::new(&error_message("Foo"), &[ + SourcePosition::new(11, 1, 10), + SourcePosition::new(64, 4, 10), + ]), + ]); + } + + #[test] + fn multiple_ops_of_same_name_of_different_types() { + expect_fails_rule(factory, r#" + query Foo { + fieldA + } + mutation Foo { + fieldB + } + "#, + &[ + RuleError::new(&error_message("Foo"), &[ + SourcePosition::new(11, 1, 10), + SourcePosition::new(64, 4, 10), + ]), + ]); + } +} diff --git a/src/validation/rules/unique_variable_names.rs b/src/validation/rules/unique_variable_names.rs new file mode 100644 index 00000000..25c48a48 --- /dev/null +++ b/src/validation/rules/unique_variable_names.rs @@ -0,0 +1,81 @@ +use std::collections::hash_map::{HashMap, Entry}; + +use ast::{Operation, VariableDefinition}; +use parser::{SourcePosition, Spanning}; +use validation::{ValidatorContext, Visitor}; + +pub struct UniqueVariableNames<'a> { + names: HashMap<&'a str, SourcePosition>, +} + +pub fn factory<'a>() -> UniqueVariableNames<'a> { + UniqueVariableNames { + names: HashMap::new(), + } +} + +impl<'a> Visitor<'a> for UniqueVariableNames<'a> { + fn enter_operation_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) { + self.names = HashMap::new(); + } + + fn enter_variable_definition(&mut self, ctx: &mut ValidatorContext<'a>, &(ref var_name, _): &'a (Spanning, VariableDefinition)) { + match self.names.entry(&var_name.item) { + Entry::Occupied(e) => { + ctx.report_error( + &error_message(&var_name.item), + &[e.get().clone(), var_name.start.clone()]); + } + Entry::Vacant(e) => { + e.insert(var_name.start.clone()); + } + } + } +} + +fn error_message(var_name: &str) -> String { + format!("There can only be one variable named {}", var_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn unique_variable_names() { + expect_passes_rule(factory, r#" + query A($x: Int, $y: String) { __typename } + query B($x: String, $y: Int) { __typename } + "#); + } + + #[test] + fn duplicate_variable_names() { + expect_fails_rule(factory, r#" + query A($x: Int, $x: Int, $x: String) { __typename } + query B($x: String, $x: Int) { __typename } + query C($x: Int, $x: Int) { __typename } + "#, + &[ + RuleError::new(&error_message("x"), &[ + SourcePosition::new(19, 1, 18), + SourcePosition::new(28, 1, 27), + ]), + RuleError::new(&error_message("x"), &[ + SourcePosition::new(19, 1, 18), + SourcePosition::new(37, 1, 36), + ]), + RuleError::new(&error_message("x"), &[ + SourcePosition::new(82, 2, 18), + SourcePosition::new(94, 2, 30), + ]), + RuleError::new(&error_message("x"), &[ + SourcePosition::new(136, 3, 18), + 
SourcePosition::new(145, 3, 27), + ]), + ]); + } +} diff --git a/src/validation/rules/variables_are_input_types.rs b/src/validation/rules/variables_are_input_types.rs new file mode 100644 index 00000000..8b6b9eb9 --- /dev/null +++ b/src/validation/rules/variables_are_input_types.rs @@ -0,0 +1,62 @@ +use ast::VariableDefinition; +use parser::Spanning; +use validation::{ValidatorContext, Visitor}; + +pub struct UniqueVariableNames {} + +pub fn factory() -> UniqueVariableNames { + UniqueVariableNames {} +} + +impl<'a> Visitor<'a> for UniqueVariableNames { + fn enter_variable_definition(&mut self, ctx: &mut ValidatorContext<'a>, &(ref var_name, ref var_def): &'a (Spanning, VariableDefinition)) { + if let Some(var_type) = ctx.schema.concrete_type_by_name(var_def.var_type.item.innermost_name()) { + if !var_type.is_input() { + ctx.report_error( + &error_message(&var_name.item, &format!("{}", var_def.var_type.item)), + &[var_def.var_type.start.clone()]); + } + } + } +} + +fn error_message(var_name: &str, type_name: &str) -> String { + format!("Variable \"{}\" cannot be of non-input type \"{}\"", var_name, type_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn input_types_are_valid() { + expect_passes_rule(factory, r#" + query Foo($a: String, $b: [Boolean!]!, $c: ComplexInput) { + field(a: $a, b: $b, c: $c) + } + "#); + } + + #[test] + fn output_types_are_invalid() { + expect_fails_rule(factory, r#" + query Foo($a: Dog, $b: [[CatOrDog!]]!, $c: Pet) { + field(a: $a, b: $b, c: $c) + } + "#, + &[ + RuleError::new(&error_message("a", "Dog"), &[ + SourcePosition::new(25, 1, 24), + ]), + RuleError::new(&error_message("b", "[[CatOrDog!]]!"), &[ + SourcePosition::new(34, 1, 33), + ]), + RuleError::new(&error_message("c", "Pet"), &[ + SourcePosition::new(54, 1, 53), + ]), + ]); + } +} diff --git a/src/validation/rules/variables_in_allowed_position.rs b/src/validation/rules/variables_in_allowed_position.rs new file mode 100644 index 00000000..e21f3b62 --- /dev/null +++ b/src/validation/rules/variables_in_allowed_position.rs @@ -0,0 +1,429 @@ +use std::collections::{HashSet, HashMap}; + +use ast::{Type, VariableDefinition, Document, Fragment, Operation, FragmentSpread}; +use parser::Spanning; +use validation::{ValidatorContext, Visitor}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Scope<'a> { + Operation(Option<&'a str>), + Fragment(&'a str), +} + +pub struct VariableInAllowedPosition<'a> { + spreads: HashMap, HashSet<&'a str>>, + variable_usages: HashMap, Vec<(Spanning<&'a String>, Type)>>, + variable_defs: HashMap, Vec<&'a (Spanning, VariableDefinition)>>, + current_scope: Option>, +} + +pub fn factory<'a>() -> VariableInAllowedPosition<'a> { + VariableInAllowedPosition { + spreads: HashMap::new(), + variable_usages: HashMap::new(), + variable_defs: HashMap::new(), + current_scope: None, + } +} + +impl<'a> VariableInAllowedPosition<'a> { + fn collect_incorrect_usages( + &self, + from: &Scope<'a>, + var_defs: &Vec<&'a (Spanning, VariableDefinition)>, + ctx: &mut ValidatorContext<'a>, + visited: &mut HashSet>, + ) + { + if visited.contains(from) { + return; + } + + visited.insert(from.clone()); + + if let Some(usages) = self.variable_usages.get(from) { + for &(ref var_name, ref var_type) in usages { + if let Some(&&(ref var_def_name, ref var_def)) = var_defs + .iter() + .filter(|&&&(ref n, _)| &n.item == var_name.item) + .next() + { + let expected_type 
= match (&var_def.default_value, &var_def.var_type.item) { + (&Some(_), &Type::List(ref inner)) => Type::NonNullList(inner.clone()), + (&Some(_), &Type::Named(ref inner)) => Type::NonNullNamed(inner.clone()), + (_, t) => t.clone(), + }; + + println!("Variable {} of type {} used in position expecting {}", var_name.item, expected_type, var_type); + + if !ctx.schema.is_subtype(&expected_type, var_type) { + ctx.report_error( + &error_message(&var_name.item, &format!("{}", expected_type), &format!("{}", var_type)), + &[var_def_name.start.clone(), var_name.start.clone()]); + } + } + } + } + + if let Some(spreads) = self.spreads.get(from) { + for spread in spreads { + self.collect_incorrect_usages(&Scope::Fragment(spread), var_defs, ctx, visited); + } + } + } +} + +impl<'a> Visitor<'a> for VariableInAllowedPosition<'a> { + fn exit_document(&mut self, ctx: &mut ValidatorContext<'a>, _: &'a Document) { + for (op_scope, var_defs) in &self.variable_defs { + self.collect_incorrect_usages(&op_scope, var_defs, ctx, &mut HashSet::new()); + } + } + + fn enter_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, fragment: &'a Spanning) { + self.current_scope = Some(Scope::Fragment(&fragment.item.name.item)); + } + + fn enter_operation_definition(&mut self, _: &mut ValidatorContext<'a>, op: &'a Spanning) { + self.current_scope = Some(Scope::Operation(op.item.name.as_ref().map(|s| s.item.as_str()))); + } + + fn enter_fragment_spread(&mut self, _: &mut ValidatorContext<'a>, spread: &'a Spanning) { + if let Some(ref scope) = self.current_scope { + self.spreads + .entry(scope.clone()) + .or_insert_with(|| HashSet::new()) + .insert(&spread.item.name.item); + } + } + + fn enter_variable_definition(&mut self, _: &mut ValidatorContext<'a>, def: &'a (Spanning, VariableDefinition)) { + if let Some(ref scope) = self.current_scope { + self.variable_defs + .entry(scope.clone()) + .or_insert_with(|| Vec::new()) + .push(def); + } + } + + fn enter_variable_value(&mut self, ctx: &mut ValidatorContext<'a>, var_name: Spanning<&'a String>) { + if let (&Some(ref scope), Some(input_type)) = (&self.current_scope, ctx.current_input_type_literal()) { + self.variable_usages + .entry(scope.clone()) + .or_insert_with(|| Vec::new()) + .push((Spanning::start_end(&var_name.start, &var_name.end, &var_name.item), input_type.clone())); + } + } +} + +fn error_message(var_name: &str, type_name: &str, expected_type_name: &str) -> String { + format!( + "Variable \"{}\" of type \"{}\" used in position expecting type \"{}\"", + var_name, type_name, expected_type_name) +} + +#[cfg(test)] +mod tests { + use super::{error_message, factory}; + + use parser::SourcePosition; + use validation::{RuleError, expect_passes_rule, expect_fails_rule}; + + #[test] + fn boolean_into_boolean() { + expect_passes_rule(factory, r#" + query Query($booleanArg: Boolean) + { + complicatedArgs { + booleanArgField(booleanArg: $booleanArg) + } + } + "#); + } + + #[test] + fn boolean_into_boolean_within_fragment() { + expect_passes_rule(factory, r#" + fragment booleanArgFrag on ComplicatedArgs { + booleanArgField(booleanArg: $booleanArg) + } + query Query($booleanArg: Boolean) + { + complicatedArgs { + ...booleanArgFrag + } + } + "#); + + expect_passes_rule(factory, r#" + query Query($booleanArg: Boolean) + { + complicatedArgs { + ...booleanArgFrag + } + } + fragment booleanArgFrag on ComplicatedArgs { + booleanArgField(booleanArg: $booleanArg) + } + "#); + } + + #[test] + fn non_null_boolean_into_boolean() { + expect_passes_rule(factory, r#" + query 
Query($nonNullBooleanArg: Boolean!) + { + complicatedArgs { + booleanArgField(booleanArg: $nonNullBooleanArg) + } + } + "#); + } + + #[test] + fn non_null_boolean_into_boolean_within_fragment() { + expect_passes_rule(factory, r#" + fragment booleanArgFrag on ComplicatedArgs { + booleanArgField(booleanArg: $nonNullBooleanArg) + } + + query Query($nonNullBooleanArg: Boolean!) + { + complicatedArgs { + ...booleanArgFrag + } + } + "#); + } + + #[test] + fn int_into_non_null_int_with_default() { + expect_passes_rule(factory, r#" + query Query($intArg: Int = 1) + { + complicatedArgs { + nonNullIntArgField(nonNullIntArg: $intArg) + } + } + "#); + } + + #[test] + fn string_list_into_string_list() { + expect_passes_rule(factory, r#" + query Query($stringListVar: [String]) + { + complicatedArgs { + stringListArgField(stringListArg: $stringListVar) + } + } + "#); + } + + #[test] + fn non_null_string_list_into_string_list() { + expect_passes_rule(factory, r#" + query Query($stringListVar: [String!]) + { + complicatedArgs { + stringListArgField(stringListArg: $stringListVar) + } + } + "#); + } + + #[test] + fn string_into_string_list_in_item_position() { + expect_passes_rule(factory, r#" + query Query($stringVar: String) + { + complicatedArgs { + stringListArgField(stringListArg: [$stringVar]) + } + } + "#); + } + + #[test] + fn non_null_string_into_string_list_in_item_position() { + expect_passes_rule(factory, r#" + query Query($stringVar: String!) + { + complicatedArgs { + stringListArgField(stringListArg: [$stringVar]) + } + } + "#); + } + + #[test] + fn complex_input_into_complex_input() { + expect_passes_rule(factory, r#" + query Query($complexVar: ComplexInput) + { + complicatedArgs { + complexArgField(complexArg: $complexVar) + } + } + "#); + } + + #[test] + fn complex_input_into_complex_input_in_field_position() { + expect_passes_rule(factory, r#" + query Query($boolVar: Boolean = false) + { + complicatedArgs { + complexArgField(complexArg: {requiredArg: $boolVar}) + } + } + "#); + } + + #[test] + fn non_null_boolean_into_non_null_boolean_in_directive() { + expect_passes_rule(factory, r#" + query Query($boolVar: Boolean!) 
+ { + dog @include(if: $boolVar) + } + "#); + } + + #[test] + fn boolean_in_non_null_in_directive_with_default() { + expect_passes_rule(factory, r#" + query Query($boolVar: Boolean = false) + { + dog @include(if: $boolVar) + } + "#); + } + + #[test] + fn int_into_non_null_int() { + expect_fails_rule(factory, r#" + query Query($intArg: Int) { + complicatedArgs { + nonNullIntArgField(nonNullIntArg: $intArg) + } + } + "#, + &[ + RuleError::new(&error_message("intArg", "Int", "Int!"), &[ + SourcePosition::new(23, 1, 22), + SourcePosition::new(117, 3, 48), + ]), + ]); + } + + #[test] + fn int_into_non_null_int_within_fragment() { + expect_fails_rule(factory, r#" + fragment nonNullIntArgFieldFrag on ComplicatedArgs { + nonNullIntArgField(nonNullIntArg: $intArg) + } + + query Query($intArg: Int) { + complicatedArgs { + ...nonNullIntArgFieldFrag + } + } + "#, + &[ + RuleError::new(&error_message("intArg", "Int", "Int!"), &[ + SourcePosition::new(154, 5, 22), + SourcePosition::new(110, 2, 46), + ]), + ]); + } + + #[test] + fn int_into_non_null_int_within_nested_fragment() { + expect_fails_rule(factory, r#" + fragment outerFrag on ComplicatedArgs { + ...nonNullIntArgFieldFrag + } + + fragment nonNullIntArgFieldFrag on ComplicatedArgs { + nonNullIntArgField(nonNullIntArg: $intArg) + } + + query Query($intArg: Int) { + complicatedArgs { + ...outerFrag + } + } + "#, + &[ + RuleError::new(&error_message("intArg", "Int", "Int!"), &[ + SourcePosition::new(255, 9, 22), + SourcePosition::new(211, 6, 46), + ]), + ]); + } + + #[test] + fn string_over_boolean() { + expect_fails_rule(factory, r#" + query Query($stringVar: String) { + complicatedArgs { + booleanArgField(booleanArg: $stringVar) + } + } + "#, + &[ + RuleError::new(&error_message("stringVar", "String", "Boolean"), &[ + SourcePosition::new(23, 1, 22), + SourcePosition::new(117, 3, 42), + ]), + ]); + } + + #[test] + fn string_into_string_list() { + expect_fails_rule(factory, r#" + query Query($stringVar: String) { + complicatedArgs { + stringListArgField(stringListArg: $stringVar) + } + } + "#, + &[ + RuleError::new(&error_message("stringVar", "String", "[String]"), &[ + SourcePosition::new(23, 1, 22), + SourcePosition::new(123, 3, 48), + ]), + ]); + } + + #[test] + fn boolean_into_non_null_boolean_in_directive() { + expect_fails_rule(factory, r#" + query Query($boolVar: Boolean) { + dog @include(if: $boolVar) + } + "#, + &[ + RuleError::new(&error_message("boolVar", "Boolean", "Boolean!"), &[ + SourcePosition::new(23, 1, 22), + SourcePosition::new(73, 2, 29), + ]), + ]); + } + + #[test] + fn string_into_non_null_boolean_in_directive() { + expect_fails_rule(factory, r#" + query Query($stringVar: String) { + dog @include(if: $stringVar) + } + "#, + &[ + RuleError::new(&error_message("stringVar", "String", "Boolean!"), &[ + SourcePosition::new(23, 1, 22), + SourcePosition::new(74, 2, 29), + ]), + ]); + } +} diff --git a/src/validation/test_harness.rs b/src/validation/test_harness.rs new file mode 100644 index 00000000..77a5abd1 --- /dev/null +++ b/src/validation/test_harness.rs @@ -0,0 +1,479 @@ +use parser::parse_document_source; +use ast::{FromInputValue, InputValue}; +use types::base::GraphQLType; +use types::schema::Registry; +use types::scalars::ID; +use schema::model::{DirectiveType, DirectiveLocation, RootNode}; +use schema::meta::{EnumValue, MetaType}; +use validation::{Visitor, RuleError, ValidatorContext, MultiVisitor, visit}; + +struct Being; +struct Pet; +struct Canine; + +struct Dog; +struct Cat; + +struct Intelligent; +struct Human; 
+struct Alien; + +struct DogOrHuman; +struct CatOrDog; +struct HumanOrAlien; + +struct ComplicatedArgs; + +struct QueryRoot; + +#[derive(Debug)] +enum DogCommand { + Sit, + Heel, + Down, +} + +#[derive(Debug)] +enum FurColor { + Brown, + Black, + Tan, + Spotted, +} + +#[allow(dead_code)] +#[derive(Debug)] +struct ComplexInput { + required_field: bool, + int_field: Option, + string_field: Option, + boolean_field: Option, + string_list_field: Option>>, +} + +impl GraphQLType for Being { + fn name() -> Option<&'static str> { + Some("Being") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_interface_type::()(&[ + registry.field::>("name") + .argument(registry.arg::>("surname")), + ]) + .into_meta() + } +} + +impl GraphQLType for Pet { + fn name() -> Option<&'static str> { + Some("Pet") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_interface_type::()(&[ + registry.field::>("name") + .argument(registry.arg::>("surname")), + ]) + .into_meta() + } +} + +impl GraphQLType for Canine { + fn name() -> Option<&'static str> { + Some("Canine") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_interface_type::()(&[ + registry.field::>("name") + .argument(registry.arg::>("surname")), + ]) + .into_meta() + } +} + +impl GraphQLType for DogCommand { + fn name() -> Option<&'static str> { + Some("DogCommand") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_enum_type::()(&[ + EnumValue::new("SIT"), + EnumValue::new("HEEL"), + EnumValue::new("DOWN"), + ]) + .into_meta() + } +} + +impl FromInputValue for DogCommand { + fn from(v: &InputValue) -> Option { + match v.as_enum_value() { + Some("SIT") => Some(DogCommand::Sit), + Some("HEEL") => Some(DogCommand::Heel), + Some("DOWN") => Some(DogCommand::Down), + _ => None, + } + } +} + +impl GraphQLType for Dog { + fn name() -> Option<&'static str> { + Some("Dog") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("name") + .argument(registry.arg::>("surname")), + registry.field::>("nickname"), + registry.field::>("barkVolume"), + registry.field::>("barks"), + registry.field::>("doesKnowCommand") + .argument(registry.arg::>("dogCommand")), + registry.field::>("isHousetrained") + .argument(registry.arg_with_default("atOtherHomes", &true)), + registry.field::>("isAtLocation") + .argument(registry.arg::>("x")) + .argument(registry.arg::>("y")), + ]) + .interfaces(&[ + registry.get_type::(), + registry.get_type::(), + registry.get_type::(), + ]) + .into_meta() + } +} + +impl GraphQLType for FurColor { + fn name() -> Option<&'static str> { + Some("FurColor") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_enum_type::()(&[ + EnumValue::new("BROWN"), + EnumValue::new("BLACK"), + EnumValue::new("TAN"), + EnumValue::new("SPOTTED"), + ]) + .into_meta() + } +} + +impl FromInputValue for FurColor { + fn from(v: &InputValue) -> Option { + match v.as_enum_value() { + Some("BROWN") => Some(FurColor::Brown), + Some("BLACK") => Some(FurColor::Black), + Some("TAN") => Some(FurColor::Tan), + Some("SPOTTED") => Some(FurColor::Spotted), + _ => None, + } + } +} + +impl GraphQLType for Cat { + fn name() -> Option<&'static str> { + Some("Cat") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("name") + .argument(registry.arg::>("surname")), + registry.field::>("nickname"), + registry.field::>("meows"), + registry.field::>("meowVolume"), + 
registry.field::>("furColor"), + ]) + .interfaces(&[ + registry.get_type::(), + registry.get_type::(), + ]) + .into_meta() + } +} + +impl GraphQLType for CatOrDog { + fn name() -> Option<&'static str> { + Some("CatOrDog") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_union_type::()(&[ + registry.get_type::(), + registry.get_type::(), + ]) + .into_meta() + } +} + +impl GraphQLType for Intelligent { + fn name() -> Option<&'static str> { + Some("Intelligent") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_interface_type::()(&[ + registry.field::>("iq"), + ]) + .into_meta() + } +} + +impl GraphQLType for Human { + fn name() -> Option<&'static str> { + Some("Human") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("name") + .argument(registry.arg::>("surname")), + registry.field::>>>("pets"), + registry.field::>>("relatives"), + registry.field::>("iq"), + ]) + .interfaces(&[ + registry.get_type::(), + registry.get_type::(), + ]) + .into_meta() + } +} + +impl GraphQLType for Alien { + fn name() -> Option<&'static str> { + Some("Alien") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("name") + .argument(registry.arg::>("surname")), + registry.field::>("iq"), + registry.field::>("numEyes"), + ]) + .interfaces(&[ + registry.get_type::(), + registry.get_type::(), + ]) + .into_meta() + } +} + +impl GraphQLType for DogOrHuman { + fn name() -> Option<&'static str> { + Some("DogOrHuman") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_union_type::()(&[ + registry.get_type::(), + registry.get_type::(), + ]) + .into_meta() + } +} + +impl GraphQLType for HumanOrAlien { + fn name() -> Option<&'static str> { + Some("HumanOrAlien") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_union_type::()(&[ + registry.get_type::(), + registry.get_type::(), + ]) + .into_meta() + } +} + +impl GraphQLType for ComplexInput { + fn name() -> Option<&'static str> { + Some("ComplexInput") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_input_object_type::()(&[ + registry.arg::("requiredField"), + registry.arg::>("intField"), + registry.arg::>("stringField"), + registry.arg::>("booleanField"), + registry.arg::>>>("stringListField"), + ]) + .into_meta() + } +} + +impl FromInputValue for ComplexInput { + fn from(v: &InputValue) -> Option { + let obj = match v.to_object_value() { + Some(o) => o, + None => return None, + }; + + Some(ComplexInput { + required_field: match obj.get("requiredField").and_then(|v| v.convert()) { + Some(f) => f, + None => return None, + }, + int_field: obj.get("intField").and_then(|v| v.convert()), + string_field: obj.get("stringField").and_then(|v| v.convert()), + boolean_field: obj.get("booleanField").and_then(|v| v.convert()), + string_list_field: obj.get("stringListField").and_then(|v| v.convert()), + }) + } +} + +impl GraphQLType for ComplicatedArgs { + fn name() -> Option<&'static str> { + Some("ComplicatedArgs") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("intArgField") + .argument(registry.arg::>("intArg")), + registry.field::>("nonNullIntArgField") + .argument(registry.arg::("nonNullIntArg")), + registry.field::>("stringArgField") + .argument(registry.arg::>("stringArg")), + registry.field::>("booleanArgField") + .argument(registry.arg::>("booleanArg")), + 
registry.field::>("enumArgField") + .argument(registry.arg::>("enumArg")), + registry.field::>("floatArgField") + .argument(registry.arg::>("floatArg")), + registry.field::>("idArgField") + .argument(registry.arg::>("idArg")), + registry.field::>("stringListArgField") + .argument(registry.arg::>>>("stringListArg")), + registry.field::>("complexArgField") + .argument(registry.arg::>("complexArg")), + registry.field::>("multipleReqs") + .argument(registry.arg::("req1")) + .argument(registry.arg::("req2")), + registry.field::>("multipleOpts") + .argument(registry.arg_with_default("opt1", &0i64)) + .argument(registry.arg_with_default("opt2", &0i64)), + registry.field::>("multipleOptAndReq") + .argument(registry.arg::("req1")) + .argument(registry.arg::("req2")) + .argument(registry.arg_with_default("opt1", &0i64)) + .argument(registry.arg_with_default("opt2", &0i64)), + ]) + .into_meta() + } +} + +impl GraphQLType for QueryRoot { + fn name() -> Option<&'static str> { + Some("QueryRoot") + } + + fn meta(registry: &mut Registry) -> MetaType { + registry.build_object_type::()(&[ + registry.field::>("human") + .argument(registry.arg::>("id")), + registry.field::>("alien"), + registry.field::>("dog"), + registry.field::>("cat"), + registry.field::>("pet"), + registry.field::>("catOrDog"), + registry.field::>("dorOrHuman"), + registry.field::>("humanOrAlien"), + registry.field::>("complicatedArgs"), + ]) + .into_meta() + } +} + +pub fn validate<'a, R, V, F>(r: R, q: &str, factory: F) + -> Vec + where R: GraphQLType<()>, + V: Visitor<'a> + 'a, + F: Fn() -> V +{ + let mut root = RootNode::<(), R, ()>::new(r, ()); + + root.schema.add_directive(DirectiveType::new("onQuery", &[DirectiveLocation::Query], &[])); + root.schema.add_directive(DirectiveType::new("onMutation", &[DirectiveLocation::Mutation], &[])); + root.schema.add_directive(DirectiveType::new("onField", &[DirectiveLocation::Field], &[])); + root.schema.add_directive(DirectiveType::new("onFragmentDefinition", &[DirectiveLocation::FragmentDefinition], &[])); + root.schema.add_directive(DirectiveType::new("onFragmentSpread", &[DirectiveLocation::FragmentSpread], &[])); + root.schema.add_directive(DirectiveType::new("onInlineFragment", &[DirectiveLocation::InlineFragment], &[])); + + let doc = parse_document_source(q) + .expect(&format!("Parse error on input {:#?}", q)); + let mut ctx = ValidatorContext::new( + unsafe { ::std::mem::transmute(&root.schema) }, + &doc); + + let mut mv = MultiVisitor::new(vec![ Box::new(factory()) ]); + visit(&mut mv, &mut ctx, unsafe { ::std::mem::transmute(&doc) }); + + ctx.into_errors() +} + +pub fn expect_passes_rule<'a, V, F>(factory: F, q: &str) + where V: Visitor<'a> + 'a, + F: Fn() -> V +{ + expect_passes_rule_with_schema(QueryRoot, factory, q); +} + +pub fn expect_passes_rule_with_schema<'a, R, V, F>(r: R, factory: F, q: &str) + where R: GraphQLType<()>, + V: Visitor<'a> + 'a, + F: Fn() -> V +{ + let errs = validate(r, q, factory); + + if !errs.is_empty() { + print_errors(&errs); + panic!("Expected rule to pass, but errors found"); + } +} + +pub fn expect_fails_rule<'a, V, F>(factory: F, q: &str, expected_errors: &[RuleError]) + where V: Visitor<'a> + 'a, + F: Fn() -> V +{ + expect_fails_rule_with_schema(QueryRoot, factory, q, expected_errors); +} + +pub fn expect_fails_rule_with_schema<'a, R, V, F>(r: R, factory: F, q: &str, expected_errors: &[RuleError]) + where R: GraphQLType<()>, + V: Visitor<'a> + 'a, + F: Fn() -> V +{ + let errs = validate(r, q, factory); + + if errs.is_empty() { + panic!("Expected 
rule to fail, but no errors were found"); + } + else if errs != expected_errors { + println!("==> Expected errors:"); + print_errors(expected_errors); + + println!("\n==> Actual errors:"); + print_errors(&errs); + + panic!("Unexpected set of errors found"); + } +} + +fn print_errors(errs: &[RuleError]) { + for err in errs { + for p in err.locations() { + print!("[{:>3},{:>3},{:>3}] ", p.index(), p.line(), p.column()); + } + println!("{}", err.message()); + } +} diff --git a/src/validation/traits.rs b/src/validation/traits.rs new file mode 100644 index 00000000..c6913cdb --- /dev/null +++ b/src/validation/traits.rs @@ -0,0 +1,65 @@ +use ast::{Document, Operation, Fragment, VariableDefinition, Selection, + Directive, InputValue, Field, FragmentSpread, InlineFragment}; +use parser::Spanning; +use validation::ValidatorContext; + + +#[doc(hidden)] +pub trait Visitor<'a> { + fn enter_document(&mut self, _: &mut ValidatorContext<'a>, _: &'a Document) {} + fn exit_document(&mut self, _: &mut ValidatorContext<'a>, _: &'a Document) {} + + fn enter_operation_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + fn exit_operation_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + + fn enter_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + fn exit_fragment_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + + fn enter_variable_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a (Spanning, VariableDefinition)) {} + fn exit_variable_definition(&mut self, _: &mut ValidatorContext<'a>, _: &'a (Spanning, VariableDefinition)) {} + + fn enter_directive(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + fn exit_directive(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + + fn enter_argument(&mut self, _: &mut ValidatorContext<'a>, _: &'a (Spanning, Spanning)) {} + fn exit_argument(&mut self, _: &mut ValidatorContext<'a>, _: &'a (Spanning, Spanning)) {} + + fn enter_selection_set(&mut self, _: &mut ValidatorContext<'a>, _: &'a Vec) {} + fn exit_selection_set(&mut self, _: &mut ValidatorContext<'a>, _: &'a Vec) {} + + fn enter_field(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + fn exit_field(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + + fn enter_fragment_spread(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + fn exit_fragment_spread(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + + fn enter_inline_fragment(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + fn exit_inline_fragment(&mut self, _: &mut ValidatorContext<'a>, _: &'a Spanning) {} + + fn enter_int_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning) {} + fn exit_int_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning) {} + + fn enter_float_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning) {} + fn exit_float_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning) {} + + fn enter_string_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a String>) {} + fn exit_string_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a String>) {} + + fn enter_boolean_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning) {} + fn exit_boolean_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning) {} + + fn enter_enum_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a String>) {} + fn exit_enum_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a String>) {} + + fn enter_variable_value(&mut 
self, _: &mut ValidatorContext<'a>, _: Spanning<&'a String>) {} + fn exit_variable_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a String>) {} + + fn enter_list_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a Vec>>) {} + fn exit_list_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a Vec>>) {} + + fn enter_object_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a Vec<(Spanning, Spanning)>>) {} + fn exit_object_value(&mut self, _: &mut ValidatorContext<'a>, _: Spanning<&'a Vec<(Spanning, Spanning)>>) {} + + fn enter_object_field(&mut self, _: &mut ValidatorContext<'a>, _: &'a (Spanning, Spanning)) {} + fn exit_object_field(&mut self, _: &mut ValidatorContext<'a>, _: &'a (Spanning, Spanning)) {} +} diff --git a/src/validation/visitor.rs b/src/validation/visitor.rs new file mode 100644 index 00000000..a3814280 --- /dev/null +++ b/src/validation/visitor.rs @@ -0,0 +1,260 @@ +use ast::{Definition, Document, Fragment, VariableDefinitions, Type, InputValue, + Directive, Arguments, Selection, Field, FragmentSpread, InlineFragment, + Operation, OperationType}; +use schema::meta::Argument; +use parser::Spanning; +use validation::{Visitor, ValidatorContext}; + +#[doc(hidden)] +pub fn visit<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, d: &'a Document) { + v.enter_document(ctx, d); + visit_definitions(v, ctx, d); + v.exit_document(ctx, d); +} + +fn visit_definitions<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, d: &'a Vec) { + for def in d { + let def_type = match *def { + Definition::Fragment(Spanning { + item: Fragment { type_condition: Spanning { item: ref name, .. }, .. }, .. }) => + Some(Type::NonNullNamed(name.to_owned())), + Definition::Operation(Spanning { + item: Operation { operation_type: OperationType::Query, .. }, .. }) => + Some(Type::NonNullNamed(ctx.schema.concrete_query_type().name().unwrap().to_owned())), + Definition::Operation(Spanning { + item: Operation { operation_type: OperationType::Mutation, .. }, .. 
}) => + ctx.schema.concrete_mutation_type() + .map(|t| Type::NonNullNamed(t.name().unwrap().to_owned())), + }; + + ctx.with_pushed_type(def_type.as_ref(), |ctx| { + enter_definition(v, ctx, def); + visit_definition(v, ctx, def); + exit_definition(v, ctx, def); + }); + } +} + +fn enter_definition<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, def: &'a Definition) { + match *def { + Definition::Operation(ref op) => v.enter_operation_definition(ctx, op), + Definition::Fragment(ref f) => v.enter_fragment_definition(ctx, f), + } +} + +fn exit_definition<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, def: &'a Definition) { + match *def { + Definition::Operation(ref op) => v.exit_operation_definition(ctx, op), + Definition::Fragment(ref f) => v.exit_fragment_definition(ctx, f), + } +} + +fn visit_definition<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, def: &'a Definition) { + match *def { + Definition::Operation(ref op) => { + visit_variable_definitions(v, ctx, &op.item.variable_definitions); + visit_directives(v, ctx, &op.item.directives); + visit_selection_set(v, ctx, &op.item.selection_set); + }, + Definition::Fragment(ref f) => { + visit_directives(v, ctx, &f.item.directives); + visit_selection_set(v, ctx, &f.item.selection_set); + }, + } +} + +fn visit_variable_definitions<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, defs: &'a Option>) { + if let Some(ref defs) = *defs { + for def in defs.item.iter() { + let var_type = def.1.var_type.item.clone(); + + ctx.with_pushed_input_type(Some(&var_type), |ctx| { + v.enter_variable_definition(ctx, def); + + if let Some(ref default_value) = def.1.default_value { + visit_input_value(v, ctx, default_value); + } + + v.exit_variable_definition(ctx, def); + }) + } + } +} + +fn visit_directives<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, directives: &'a Option>>) { + if let Some(ref directives) = *directives { + for directive in directives { + let directive_arguments = ctx.schema.directive_by_name(&directive.item.name.item).map(|d| &d.arguments); + + v.enter_directive(ctx, directive); + visit_arguments(v, ctx, &directive_arguments, &directive.item.arguments); + v.exit_directive(ctx, directive); + } + } +} + +fn visit_arguments<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, meta_args: &Option<&Vec>, arguments: &'a Option>) { + if let Some(ref arguments) = *arguments { + for argument in arguments.item.iter() { + let arg_type = meta_args + .and_then(|args| args.iter().filter(|a| a.name == argument.0.item).next()) + .map(|a| &a.arg_type); + + ctx.with_pushed_input_type(arg_type, |ctx| { + v.enter_argument(ctx, argument); + + visit_input_value(v, ctx, &argument.1); + + v.exit_argument(ctx, argument); + }) + } + } +} + +fn visit_selection_set<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, selection_set: &'a Vec) { + ctx.with_pushed_parent_type(|ctx| { + v.enter_selection_set(ctx, selection_set); + + for selection in selection_set.iter() { + visit_selection(v, ctx, selection); + } + + v.exit_selection_set(ctx, selection_set); + }); +} + +fn visit_selection<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, selection: &'a Selection) { + match *selection { + Selection::Field(ref field) => visit_field(v, ctx, field), + Selection::FragmentSpread(ref spread) => visit_fragment_spread(v, ctx, spread), + Selection::InlineFragment(ref fragment) => visit_inline_fragment(v, ctx, fragment), + } +} + +fn visit_field<'a, V: Visitor<'a>>(v: &mut V, 
ctx: &mut ValidatorContext<'a>, field: &'a Spanning) { + let meta_field = ctx.parent_type() + .and_then(|t| t.field_by_name(&field.item.name.item)); + + let field_type = meta_field.map(|f| &f.field_type); + let field_args = meta_field.and_then(|f| f.arguments.as_ref()); + + ctx.with_pushed_type(field_type, |ctx| { + v.enter_field(ctx, field); + + visit_arguments(v, ctx, &field_args, &field.item.arguments); + visit_directives(v, ctx, &field.item.directives); + + if let Some(ref selection_set) = field.item.selection_set { + visit_selection_set(v, ctx, selection_set); + } + + v.exit_field(ctx, field); + }); +} + +fn visit_fragment_spread<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, spread: &'a Spanning) { + v.enter_fragment_spread(ctx, spread); + + visit_directives(v, ctx, &spread.item.directives); + + v.exit_fragment_spread(ctx, spread); +} + +fn visit_inline_fragment<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, fragment: &'a Spanning) { + let type_name = fragment.item.type_condition.clone().map(|s| s.item); + + let mut visit_fn = move |ctx: &mut ValidatorContext<'a>| { + v.enter_inline_fragment(ctx, fragment); + + visit_directives(v, ctx, &fragment.item.directives); + visit_selection_set(v, ctx, &fragment.item.selection_set); + + v.exit_inline_fragment(ctx, fragment); + }; + + if let Some(type_name) = type_name { + ctx.with_pushed_type(Some(&Type::NonNullNamed(type_name)), visit_fn); + } + else { + visit_fn(ctx); + } +} + +fn visit_input_value<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, input_value: &'a Spanning) { + enter_input_value(v, ctx, input_value); + + match input_value.item { + InputValue::Object(ref fields) => { + for field in fields { + let inner_type = ctx.current_input_type_literal() + .and_then(|t| match *t { + Type::NonNullNamed(ref name) | Type::Named(ref name) => + ctx.schema.concrete_type_by_name(name), + _ => None, + }) + .and_then(|ct| ct.input_field_by_name(&field.0.item)) + .map(|f| &f.arg_type); + + ctx.with_pushed_input_type(inner_type, |ctx| { + v.enter_object_field(ctx, field); + visit_input_value(v, ctx, &field.1); + v.exit_object_field(ctx, field); + }) + } + } + InputValue::List(ref ls) => { + let inner_type = ctx.current_input_type_literal().and_then(|t| match *t { + Type::List(ref inner) | Type::NonNullList(ref inner) => + Some(inner.as_ref().clone()), + _ => None, + }); + + ctx.with_pushed_input_type(inner_type.as_ref(), |ctx| { + for value in ls { + visit_input_value(v, ctx, value); + } + }) + } + _ => (), + } + + exit_input_value(v, ctx, input_value); +} + +fn enter_input_value<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, input_value: &'a Spanning) { + use InputValue::*; + + let start = &input_value.start; + let end = &input_value.end; + + match input_value.item { + Null => panic!("null values can't appear in the AST"), + Int(ref i) => v.enter_int_value(ctx, Spanning::start_end(start, end, *i)), + Float(ref f) => v.enter_float_value(ctx, Spanning::start_end(start, end, *f)), + String(ref s) => v.enter_string_value(ctx, Spanning::start_end(start, end, s)), + Boolean(ref b) => v.enter_boolean_value(ctx, Spanning::start_end(start, end, *b)), + Enum(ref s) => v.enter_enum_value(ctx, Spanning::start_end(start, end, s)), + Variable(ref s) => v.enter_variable_value(ctx, Spanning::start_end(start, end, s)), + List(ref l) => v.enter_list_value(ctx, Spanning::start_end(start, end, l)), + Object(ref o) => v.enter_object_value(ctx, Spanning::start_end(start, end, o)), + } +} + +fn 
exit_input_value<'a, V: Visitor<'a>>(v: &mut V, ctx: &mut ValidatorContext<'a>, input_value: &'a Spanning<InputValue>) {
+    use InputValue::*;
+
+    let start = &input_value.start;
+    let end = &input_value.end;
+
+    match input_value.item {
+        Null => panic!("null values can't appear in the AST"),
+        Int(ref i) => v.exit_int_value(ctx, Spanning::start_end(start, end, *i)),
+        Float(ref f) => v.exit_float_value(ctx, Spanning::start_end(start, end, *f)),
+        String(ref s) => v.exit_string_value(ctx, Spanning::start_end(start, end, s)),
+        Boolean(ref b) => v.exit_boolean_value(ctx, Spanning::start_end(start, end, *b)),
+        Enum(ref s) => v.exit_enum_value(ctx, Spanning::start_end(start, end, s)),
+        Variable(ref s) => v.exit_variable_value(ctx, Spanning::start_end(start, end, s)),
+        List(ref l) => v.exit_list_value(ctx, Spanning::start_end(start, end, l)),
+        Object(ref o) => v.exit_object_value(ctx, Spanning::start_end(start, end, o)),
+    }
+}
diff --git a/src/value.rs b/src/value.rs
new file mode 100644
index 00000000..7df16b3f
--- /dev/null
+++ b/src/value.rs
@@ -0,0 +1,123 @@
+use std::collections::HashMap;
+use std::hash::Hash;
+
+use rustc_serialize::json::{ToJson, Json};
+
+use parser::Spanning;
+use ast::{InputValue, ToInputValue};
+
+/// Serializable value returned from query and field execution.
+///
+/// Used by the execution engine and resolvers to build up the response
+/// structure. Similar to the `Json` type found in the serialize crate.
+///
+/// It is also similar to the `InputValue` type, but can not contain enum
+/// values or variables. Also, lists and objects do not contain any location
+/// information since they are generated by resolving fields and values rather
+/// than parsing a source query.
+#[derive(Debug, PartialEq)]
+#[allow(missing_docs)]
+pub enum Value {
+    Null,
+    Int(i64),
+    Float(f64),
+    String(String),
+    Boolean(bool),
+    List(Vec<Value>),
+    Object(HashMap<String, Value>),
+}
+
+impl Value {
+    // CONSTRUCTORS
+
+    /// Construct a null value.
+    pub fn null() -> Value { Value::Null }
+
+    /// Construct an integer value.
+    pub fn int(i: i64) -> Value { Value::Int(i) }
+
+    /// Construct a floating point value.
+    pub fn float(f: f64) -> Value { Value::Float(f) }
+
+    /// Construct a string value.
+    pub fn string<T: AsRef<str>>(s: T) -> Value { Value::String(s.as_ref().to_owned()) }
+
+    /// Construct a boolean value.
+    pub fn boolean(b: bool) -> Value { Value::Boolean(b) }
+
+    /// Construct a list value.
+    pub fn list(l: Vec<Value>) -> Value { Value::List(l) }
+
+    /// Construct an object value.
+    pub fn object<K>(o: HashMap<K, Value>) -> Value
+        where K: AsRef<str> + Eq + Hash
+    {
+        Value::Object(
+            o.into_iter().map(|(k, v)| (k.as_ref().to_owned(), v)).collect()
+        )
+    }
+
+    // DISCRIMINATORS
+
+    /// Does this value represent null?
+    pub fn is_null(&self) -> bool {
+        match *self {
+            Value::Null => true,
+            _ => false,
+        }
+    }
+
+    /// View the underlying object value, if present.
+    pub fn as_object_value(&self) -> Option<&HashMap<String, Value>> {
+        match *self {
+            Value::Object(ref o) => Some(o),
+            _ => None,
+        }
+    }
+
+    /// View the underlying list value, if present.
+    pub fn as_list_value(&self) -> Option<&Vec<Value>> {
+        match *self {
+            Value::List(ref l) => Some(l),
+            _ => None,
+        }
+    }
+
+    /// View the underlying string value, if present.
+    pub fn as_string_value(&self) -> Option<&str> {
+        match *self {
+            Value::String(ref s) => Some(s),
+            _ => None,
+        }
+    }
+}
+
+impl ToJson for Value {
+    fn to_json(&self) -> Json {
+        match *self {
+            Value::Null => Json::Null,
+            Value::Int(i) => Json::I64(i),
+            Value::Float(f) => Json::F64(f),
+            Value::String(ref s) => Json::String(s.clone()),
+            Value::Boolean(b) => Json::Boolean(b),
+            Value::List(ref l) => Json::Array(l.iter().map(|x| x.to_json()).collect()),
+            Value::Object(ref o) => Json::Object(o.iter().map(|(k, v)| (k.clone(), v.to_json())).collect()),
+        }
+    }
+}
+
+impl ToInputValue for Value {
+    fn to(&self) -> InputValue {
+        match *self {
+            Value::Null => InputValue::Null,
+            Value::Int(i) => InputValue::Int(i),
+            Value::Float(f) => InputValue::Float(f),
+            Value::String(ref s) => InputValue::String(s.clone()),
+            Value::Boolean(b) => InputValue::Boolean(b),
+            Value::List(ref l) => InputValue::List(l.iter().map(|x|
+                Spanning::unlocated(x.to())).collect()),
+            Value::Object(ref o) => InputValue::Object(o.iter().map(|(k, v)|
+                (Spanning::unlocated(k.clone()), Spanning::unlocated(v.to()))).collect()),
+        }
+    }
+}
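
The `Value` type added above is what resolvers hand back to the execution engine. As a rough illustration of how its constructors, discriminators, and the `ToJson` impl fit together, here is a hedged sketch that is not part of the commit itself; it assumes `Value` is re-exported at the crate root (`juniper::Value`) and uses only the 2016-era rustc-serialize API shown in the diff:

```rust
// Illustrative sketch only -- not part of the diff above. Assumes `Value`
// is re-exported from the crate root as `juniper::Value`.
extern crate rustc_serialize;
extern crate juniper;

use std::collections::HashMap;

use rustc_serialize::json::ToJson;
use juniper::Value;

fn main() {
    // Build up a response object the way a resolver would.
    let mut obj = HashMap::new();
    obj.insert("name", Value::string("R2-D2"));
    obj.insert("appearsIn", Value::list(vec![
        Value::string("NEW_HOPE"),
        Value::string("EMPIRE"),
        Value::string("JEDI"),
    ]));
    obj.insert("primaryFunction", Value::null());

    let value = Value::object(obj);

    // Discriminators give read-only access to the underlying data...
    assert_eq!(
        value.as_object_value().and_then(|o| o["name"].as_string_value()),
        Some("R2-D2"));

    // ...and the ToJson impl turns the whole tree into serializable JSON.
    println!("{}", value.to_json());
}
```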