Merge branch 'graphql-rust:master' into impl-hashset-as-vec

Commit 601bda784e by mx, 2022-09-28 16:47:41 +13:00, committed by GitHub
327 changed files with 13156 additions and 5727 deletions

View file

@ -21,15 +21,16 @@ jobs:
################
pr:
if: ${{ github.event_name == 'pull_request'
&& !contains(github.event.head_commit.message, '[skip ci]') }}
if: ${{ github.event_name == 'pull_request' }}
needs:
- bench
- clippy
- example
- feature
- release-check
- rustfmt
- test
- test-book
- wasm
runs-on: ubuntu-latest
steps:
@ -43,9 +44,6 @@ jobs:
##########################
clippy:
if: ${{ github.ref == 'refs/heads/master'
|| startsWith(github.ref, 'refs/tags/juniper')
|| !contains(github.event.head_commit.message, '[skip ci]') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -58,9 +56,6 @@ jobs:
- run: make cargo.lint
rustfmt:
if: ${{ github.ref == 'refs/heads/master'
|| startsWith(github.ref, 'refs/tags/juniper')
|| !contains(github.event.head_commit.message, '[skip ci]') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -79,10 +74,20 @@ jobs:
# Testing #
###########
bench:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: cargo clippy -p juniper_benchmarks --benches -- -D warnings
- run: cargo bench -p juniper_benchmarks
example:
if: ${{ github.ref == 'refs/heads/master'
|| startsWith(github.ref, 'refs/tags/juniper')
|| !contains(github.event.head_commit.message, '[skip ci]') }}
strategy:
fail-fast: false
matrix:
@ -111,9 +116,6 @@ jobs:
- run: cargo check -p example_${{ matrix.example }}
feature:
if: ${{ github.ref == 'refs/heads/master'
|| startsWith(github.ref, 'refs/tags/juniper')
|| !contains(github.event.head_commit.message, '[skip ci]') }}
strategy:
fail-fast: false
matrix:
@ -174,9 +176,6 @@ jobs:
- run: cargo package -p ${{ steps.crate.outputs.NAME }}
test:
if: ${{ github.ref == 'refs/heads/master'
|| startsWith(github.ref, 'refs/tags/juniper')
|| !contains(github.event.head_commit.message, '[skip ci]') }}
strategy:
fail-fast: false
matrix:
@ -187,7 +186,6 @@ jobs:
- juniper_graphql_ws
- juniper_integration_tests
- juniper_codegen_tests
- juniper_book_tests
- juniper_actix
- juniper_hyper
- juniper_iron
@ -210,14 +208,6 @@ jobs:
os: macOS
- crate: juniper_codegen_tests
os: windows
- crate: juniper_book_tests
toolchain: beta
- crate: juniper_book_tests
toolchain: nightly
# TODO: LLVM ERROR: out of memory
- crate: juniper_integration_tests
os: windows
runs-on: ${{ matrix.os }}-latest
steps:
- uses: actions/checkout@v3
@ -229,10 +219,35 @@ jobs:
- run: make test.cargo crate=${{ matrix.crate }}
test-book:
name: test Book
strategy:
fail-fast: false
matrix:
os:
- ubuntu
- macOS
# TODO: Re-enable once rust-lang/rust#99466 is fixed:
# https://github.com/rust-lang/rust/issues/99466
#- windows
toolchain:
- stable
- beta
- nightly
runs-on: ${{ matrix.os }}-latest
steps:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.toolchain }}
override: true
- run: cargo install mdbook
- run: make test.book
wasm:
if: ${{ github.ref == 'refs/heads/master'
|| startsWith(github.ref, 'refs/tags/juniper')
|| !contains(github.event.head_commit.message, '[skip ci]') }}
strategy:
fail-fast: false
matrix:
@ -264,9 +279,7 @@ jobs:
release-check:
name: Check release automation
if: ${{ !startsWith(github.ref, 'refs/tags/juniper')
&& (github.ref == 'refs/heads/master'
|| !contains(github.event.head_commit.message, '[skip ci]')) }}
if: ${{ !startsWith(github.ref, 'refs/tags/juniper') }}
strategy:
fail-fast: false
matrix:
@ -296,12 +309,14 @@ jobs:
release-github:
name: Release on GitHub
needs:
- bench
- clippy
- example
- feature
- package
- rustfmt
- test
- test-book
- wasm
if: ${{ startsWith(github.ref, 'refs/tags/juniper') }}
runs-on: ubuntu-latest
@ -325,7 +340,7 @@ jobs:
- name: Parse CHANGELOG link
id: changelog
run: echo ::set-output
name=LINK::https://github.com/${{ github.repository }}/blob/${{ steps.crate.outputs.NAME }}%40${{ steps.release.outputs.VERSION }}//${{ steps.crate.outputs.NAME }}/CHANGELOG.md#$(sed -n '/^## \[${{ steps.release.outputs.VERSION }}\]/{s/^## \[\(.*\)\][^0-9]*\([0-9].*\)/\1--\2/;s/[^0-9a-z-]*//g;p;}' ${{ steps.crate.outputs.NAME }}/CHANGELOG.md)
name=LINK::${{ github.server_url }}/${{ github.repository }}/blob/${{ steps.crate.outputs.NAME }}%40${{ steps.release.outputs.VERSION }}//${{ steps.crate.outputs.NAME }}/CHANGELOG.md#$(sed -n '/^## \[${{ steps.release.outputs.VERSION }}\]/{s/^## \[\(.*\)\][^0-9]*\([0-9].*\)/\1--\2/;s/[^0-9a-z-]*//g;p;}' ${{ steps.crate.outputs.NAME }}/CHANGELOG.md)
- uses: softprops/action-gh-release@v1
env:
@ -368,19 +383,17 @@ jobs:
deploy-book:
name: deploy Book
needs: ["test"]
needs: ["test", "test-book"]
if: ${{ github.ref == 'refs/heads/master'
|| startsWith(github.ref, 'refs/tags/juniper@') }}
|| startsWith(github.ref, 'refs/tags/juniper@') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: peaceiris/actions-mdbook@v1
- run: make book.build out=gh-pages/master
if: ${{ github.ref == 'refs/heads/master' }}
- run: make book.build out=gh-pages
if: ${{ startsWith(github.ref, 'refs/tags/juniper@') }}
- run: make book.build out=gh-pages${{ (github.ref == 'refs/heads/master'
&& '/master')
|| '' }}
- name: Deploy to GitHub Pages
uses: peaceiris/actions-gh-pages@v3

View file

@ -17,19 +17,16 @@ Before submitting a PR, you should follow these steps to prevent redundant churn
Consistent formatting is enforced on the CI.
Before you submit your PR, you should run `cargo fmt` in the root directory.
Before you submit your PR, you should run `cargo +nightly fmt --all` in the root directory (or use the `make fmt` shortcut).
Formatting should be run on the **stable** compiler.
(You can do `rustup run stable cargo fmt` when developing on nightly)
Formatting should be run on the **nightly** compiler.
### Run all tests
To run all available tests, including verifying the code examples in the book,
you can use [cargo-make](https://github.com/sagiegurari/cargo-make).
To run all available tests, including verifying the code examples in the book:
1. Install cargo-make with `cargo install cargo-make`
2. Run `cargo make ci-flow` in the root directory
(You can do `rustup run nightly cargo make ci-flow` to run all tests when developing on stable)
1. Run `cargo test` in the root directory.
2. Run `make test.book` in the root directory.
### Update the CHANGELOG

View file

@ -1,7 +1,6 @@
[workspace]
members = [
"benches",
"book/tests",
"examples/basic_subscriptions",
"examples/warp_async",
"examples/warp_subscriptions",

View file

@ -87,10 +87,14 @@ cargo.test: test.cargo
# Run Rust tests of Book.
#
# Usage:
# make test.book
# make test.book [clean=(no|yes)]
test.book:
@make test.cargo crate=juniper_book_tests
ifeq ($(clean),yes)
cargo clean
endif
cargo build
mdbook test book -L target/debug/deps
# Run Rust tests of project crates.

View file

@ -47,7 +47,7 @@ see the [actix][actix_examples], [hyper][hyper_examples], [rocket][rocket_exampl
## Features
Juniper supports the full GraphQL query language according to the
[specification][graphql_spec], including interfaces, unions, schema
[specification (October 2021)][graphql_spec], including interfaces, unions, schema
introspection, and validations. It can also output the schema in the [GraphQL Schema Language][schema_language].
Unlike many GraphQL libraries for other languages, Juniper builds
@ -97,7 +97,7 @@ Juniper has not reached 1.0 yet, thus some API instability should be expected.
[graphiql]: https://github.com/graphql/graphiql
[playground]: https://github.com/prisma/graphql-playground
[iron]: https://github.com/iron/iron
[graphql_spec]: http://facebook.github.io/graphql
[graphql_spec]: https://spec.graphql.org/October2021
[schema_language]: https://graphql.org/learn/schema/#type-language
[schema_approach]: https://blog.logrocket.com/code-first-vs-schema-first-development-graphql/
[test_schema_rs]: https://github.com/graphql-rust/juniper/blob/master/juniper/src/tests/fixtures/starwars/schema.rs

View file

@ -1,7 +1,7 @@
[package]
name = "juniper_benchmarks"
version = "0.0.0"
edition = "2018"
edition = "2021"
authors = ["Christoph Herzog <chris@theduke.at>"]
publish = false
@ -10,7 +10,7 @@ futures = "0.3"
juniper = { path = "../juniper" }
[dev-dependencies]
criterion = "0.3"
criterion = "0.4"
tokio = { version = "1.0", features = ["rt-multi-thread"] }
[[bench]]

View file

@ -1,10 +1,11 @@
use criterion::{criterion_group, criterion_main, Criterion, ParameterizedBenchmark};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use juniper::InputValue;
use juniper_benchmarks as j;
fn bench_sync_vs_async_users_flat_instant(c: &mut Criterion) {
const ASYNC_QUERY: &'static str = r#"
// language=GraphQL
const ASYNC_QUERY: &str = r#"
query Query($id: Int) {
users_async_instant(ids: [$id]!) {
id
@ -15,74 +16,70 @@ fn bench_sync_vs_async_users_flat_instant(c: &mut Criterion) {
}
"#;
const SYNC_QUERY: &'static str = r#"
query Query($id: Int) {
users_sync_instant(ids: [$id]!) {
id
kind
username
email
// language=GraphQL
const SYNC_QUERY: &str = r#"
query Query($id: Int) {
users_sync_instant(ids: [$id]!) {
id
kind
username
email
}
}
"#;
let mut group = c.benchmark_group("Sync vs Async - Users Flat - Instant");
for count in [1, 10] {
group.bench_function(BenchmarkId::new("Sync", count), |b| {
let ids = (0..count)
.map(|x| InputValue::scalar(x as i32))
.collect::<Vec<_>>();
let ids = InputValue::list(ids);
b.iter(|| {
j::execute_sync(
SYNC_QUERY,
vec![("ids".to_owned(), ids.clone())].into_iter().collect(),
)
})
});
group.bench_function(BenchmarkId::new("Async - Single Thread", count), |b| {
let rt = tokio::runtime::Builder::new_current_thread()
.build()
.unwrap();
let ids = (0..count)
.map(|x| InputValue::scalar(x as i32))
.collect::<Vec<_>>();
let ids = InputValue::list(ids);
b.iter(|| {
let f = j::execute(
ASYNC_QUERY,
vec![("ids".to_owned(), ids.clone())].into_iter().collect(),
);
rt.block_on(f)
})
});
group.bench_function(BenchmarkId::new("Async - Threadpool", count), |b| {
let rt = tokio::runtime::Builder::new_multi_thread().build().unwrap();
let ids = (0..count)
.map(|x| InputValue::scalar(x as i32))
.collect::<Vec<_>>();
let ids = InputValue::list(ids);
b.iter(|| {
let f = j::execute(
ASYNC_QUERY,
vec![("ids".to_owned(), ids.clone())].into_iter().collect(),
);
rt.block_on(f)
})
});
}
"#;
c.bench(
"Sync vs Async - Users Flat - Instant",
ParameterizedBenchmark::new(
"Sync",
|b, count| {
let ids = (0..*count)
.map(|x| InputValue::scalar(x as i32))
.collect::<Vec<_>>();
let ids = InputValue::list(ids);
b.iter(|| {
j::execute_sync(
SYNC_QUERY,
vec![("ids".to_string(), ids.clone())].into_iter().collect(),
)
})
},
vec![1, 10],
)
.with_function("Async - Single Thread", |b, count| {
let mut rt = tokio::runtime::Builder::new()
.basic_scheduler()
.build()
.unwrap();
let ids = (0..*count)
.map(|x| InputValue::scalar(x as i32))
.collect::<Vec<_>>();
let ids = InputValue::list(ids);
b.iter(|| {
let f = j::execute(
ASYNC_QUERY,
vec![("ids".to_string(), ids.clone())].into_iter().collect(),
);
rt.block_on(f)
})
})
.with_function("Async - Threadpool", |b, count| {
let mut rt = tokio::runtime::Builder::new()
.threaded_scheduler()
.build()
.unwrap();
let ids = (0..*count)
.map(|x| InputValue::scalar(x as i32))
.collect::<Vec<_>>();
let ids = InputValue::list(ids);
b.iter(|| {
let f = j::execute(
ASYNC_QUERY,
vec![("ids".to_string(), ids.clone())].into_iter().collect(),
);
rt.block_on(f)
})
}),
);
group.finish();
}
criterion_group!(benches, bench_sync_vs_async_users_flat_instant);

View file

@ -11,11 +11,11 @@ pub type QueryResult = Result<
String,
>;
pub struct Context {}
pub struct Context;
impl Context {
fn new() -> Self {
Self {}
Self
}
}
@ -51,8 +51,8 @@ impl User {
Self {
id,
kind: UserKind::Admin,
username: "userx".to_string(),
email: "userx@domain.com".to_string(),
username: "userx".into(),
email: "userx@domain.com".into(),
gender: Some(Gender::Female),
}
}
@ -97,7 +97,7 @@ pub fn new_schema() -> RootNode<'static, Query, EmptyMutation<Context>, EmptySub
pub fn execute_sync(query: &str, vars: Variables) -> QueryResult {
let root = new_schema();
let ctx = Context::new();
juniper::execute_sync(query, None, &root, &vars, &ctx).map_err(|e| format!("{:?}", e))
juniper::execute_sync(query, None, &root, &vars, &ctx).map_err(|e| format!("{e:?}"))
}
pub async fn execute(query: &str, vars: Variables) -> QueryResult {
@ -105,5 +105,5 @@ pub async fn execute(query: &str, vars: Variables) -> QueryResult {
let ctx = Context::new();
juniper::execute(query, None, &root, &vars, &ctx)
.await
.map_err(|e| format!("{:?}", e))
.map_err(|e| format!("{e:?}"))
}

View file

@ -47,21 +47,8 @@ The output will be in the `_rendered/` directory.
To run the tests validating all code examples in the book, run:
```bash
cd tests/
cargo test
# or from project root dir:
cargo test -p juniper_book_tests
mdbook test -L ../target/debug/deps
# or via shortcut from project root dir:
make test.book
```
## Test setup
All Rust code examples in the book are compiled on the CI.
This is done using the [skeptic](https://github.com/budziq/rust-skeptic) crate.

View file

@ -13,4 +13,4 @@ create-missing = false
git_repository_url = "https://github.com/graphql-rs/juniper"
[rust]
edition = "2018"
edition = "2021"

View file

@ -21,7 +21,7 @@ embedded [Graphiql][graphiql] for easy debugging.
## Features
Juniper supports the full GraphQL query language according to the
[specification][graphql_spec], including interfaces, unions, schema
[specification (October 2021)][graphql_spec], including interfaces, unions, schema
introspection, and validations.
It does not, however, support the schema language.
@ -57,7 +57,7 @@ Juniper has not reached 1.0 yet, thus some API instability should be expected.
[graphql]: http://graphql.org
[graphiql]: https://github.com/graphql/graphiql
[iron]: https://github.com/iron/iron
[graphql_spec]: http://facebook.github.io/graphql
[graphql_spec]: https://spec.graphql.org/October2021
[test_schema_rs]: https://github.com/graphql-rust/juniper/blob/master/juniper/src/tests/schema.rs
[tokio]: https://github.com/tokio-rs/tokio
[hyper_examples]: https://github.com/graphql-rust/juniper/tree/master/juniper_hyper/examples

View file

@ -68,7 +68,7 @@ use std::env;
pub fn get_db_conn() -> Connection {
let pg_connection_string = env::var("DATABASE_URI").expect("need a db uri");
println!("Connecting to {}", pg_connection_string);
println!("Connecting to {pg_connection_string}");
let conn = Connection::connect(&pg_connection_string[..], TlsMode::None).unwrap();
println!("Connection is fine");
conn
@ -101,7 +101,7 @@ impl BatchFn<i32, Cult> for CultBatcher {
// A hashmap is used, as we need to return an array which maps each original key to a Cult.
async fn load(&self, keys: &[i32]) -> HashMap<i32, Cult> {
println!("load cult batch {:?}", keys);
println!("load cult batch {keys:?}");
let mut cult_hashmap = HashMap::new();
get_cult_by_ids(&mut cult_hashmap, keys.to_vec());
cult_hashmap

View file

@ -66,7 +66,7 @@ type Schema = juniper::RootNode<
fn main() {
// Create a context object.
let ctx = Context{};
let ctx = Context;
// Run the built-in introspection query.
let (res, _errors) = juniper::introspect(

View file

@ -25,9 +25,11 @@ This example shows a subscription operation that returns two events, the strings
sequentially:
```rust
# use juniper::{graphql_object, graphql_subscription, FieldError};
# use futures::Stream;
# extern crate futures;
# extern crate juniper;
# use std::pin::Pin;
# use futures::Stream;
# use juniper::{graphql_object, graphql_subscription, FieldError};
#
# #[derive(Clone)]
# pub struct Database;
@ -80,7 +82,6 @@ where [`Connection`][Connection] is a `Stream` of values returned by the operati
# extern crate juniper;
# extern crate juniper_subscriptions;
# extern crate serde_json;
# extern crate tokio;
# use juniper::{
# http::GraphQLRequest,
# graphql_object, graphql_subscription,
@ -98,7 +99,7 @@ where [`Connection`][Connection] is a `Stream` of values returned by the operati
#
# impl Database {
# fn new() -> Self {
# Self {}
# Self
# }
# }
#
@ -126,7 +127,7 @@ where [`Connection`][Connection] is a `Stream` of values returned by the operati
type Schema = RootNode<'static, Query, EmptyMutation<Database>, Subscription>;
fn schema() -> Schema {
Schema::new(Query {}, EmptyMutation::new(), Subscription {})
Schema::new(Query, EmptyMutation::new(), Subscription)
}
async fn run_subscription() {

View file

@ -130,7 +130,7 @@ impl Mutation {
type Schema = juniper::RootNode<'static, Query, Mutation, EmptySubscription<Context>>;
#
# fn main() {
# let _ = Schema::new(Query, Mutation{}, EmptySubscription::new());
# let _ = Schema::new(Query, Mutation, EmptySubscription::new());
# }
```

View file

@ -50,7 +50,7 @@ struct Root;
#[juniper::graphql_object]
impl Root {
fn foo() -> String {
"Bar".to_owned()
"Bar".into()
}
}

View file

@ -77,6 +77,190 @@ struct Human {
```
### Interfaces implementing other interfaces
GraphQL allows implementing interfaces on other interfaces in addition to objects.
```rust
# extern crate juniper;
use juniper::{graphql_interface, graphql_object, ID};
#[graphql_interface(for = [HumanValue, Luke])]
struct Node {
id: ID,
}
#[graphql_interface(impl = NodeValue, for = Luke)]
struct Human {
id: ID,
home_planet: String,
}
struct Luke {
id: ID,
}
#[graphql_object(impl = [HumanValue, NodeValue])]
impl Luke {
fn id(&self) -> &ID {
&self.id
}
// As `String` and `&str` aren't distinguished by the
// GraphQL spec, you can use them interchangeably.
// The same applies to `Cow<'a, str>`.
// ⌄⌄⌄⌄⌄⌄⌄⌄⌄⌄⌄⌄
fn home_planet() -> &'static str {
"Tatooine"
}
}
#
# fn main() {}
```
> __NOTE:__ Every interface has to specify all other interfaces/objects it implements or is implemented by. Missing one of the `for = ` or `impl = ` attributes is a compile-time error.
```compile_fail
# extern crate juniper;
use juniper::{graphql_interface, GraphQLObject};
#[derive(GraphQLObject)]
pub struct ObjA {
id: String,
}
#[graphql_interface(for = ObjA)]
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the evaluated program panicked at
// 'Failed to implement interface `Character` on `ObjA`: missing interface reference in implementer's `impl` attribute.'
struct Character {
id: String,
}
fn main() {}
```
### GraphQL subtyping and additional `null`able fields
GraphQL allows implementers (both objects and other interfaces) to return "subtypes" instead of an original value. Basically, this allows you to impose additional bounds on the implementation.
Valid "subtypes" are:
- interface implementer instead of an interface itself:
- `I implements T` in place of a `T`;
- `Vec<I implements T>` in place of a `Vec<T>`.
- non-null value in place of a nullable:
- `T` in place of an `Option<T>`;
- `Vec<T>` in place of a `Vec<Option<T>>`.
These rules are applied recursively, so a `Vec<Vec<I implements T>>` is a valid "subtype" of an `Option<Vec<Option<Vec<Option<T>>>>>`.
Also, GraphQL allows implementers to add `null`able fields, which aren't present on the original interface.
```rust
# extern crate juniper;
use juniper::{graphql_interface, graphql_object, ID};
#[graphql_interface(for = [HumanValue, Luke])]
struct Node {
id: ID,
}
#[graphql_interface(for = HumanConnectionValue)]
struct Connection {
nodes: Vec<NodeValue>,
}
#[graphql_interface(impl = NodeValue, for = Luke)]
struct Human {
id: ID,
home_planet: String,
}
#[graphql_interface(impl = ConnectionValue)]
struct HumanConnection {
nodes: Vec<HumanValue>,
// ^^^^^^^^^^ notice not `NodeValue`
// This can happen, because every `Human` is a `Node` too, so we are just
// imposing additional bounds, which still can be resolved with
// `... on Connection { nodes }`.
}
struct Luke {
id: ID,
}
#[graphql_object(impl = [HumanValue, NodeValue])]
impl Luke {
fn id(&self) -> &ID {
&self.id
}
fn home_planet(language: Option<String>) -> &'static str {
// ^^^^^^^^^^^^^^
// Notice additional `null`able field, which is missing on `Human`.
// Resolving `...on Human { homePlanet }` will provide `None` for this
// argument.
match language.as_deref() {
None | Some("en") => "Tatooine",
Some("ko") => "타투인",
_ => todo!(),
}
}
}
#
# fn main() {}
```
Violating GraphQL "subtyping" or additional nullable field rules is a compile-time error.
```compile_fail
# extern crate juniper;
use juniper::{graphql_interface, graphql_object};
pub struct ObjA {
id: String,
}
#[graphql_object(impl = CharacterValue)]
impl ObjA {
fn id(&self, is_present: bool) -> &str {
// ^^ the evaluated program panicked at
// 'Failed to implement interface `Character` on `ObjA`: Field `id`: Argument `isPresent` of type `Boolean!`
// isn't present on the interface and so has to be nullable.'
is_present.then_some(&self.id).unwrap_or("missing")
}
}
#[graphql_interface(for = ObjA)]
struct Character {
id: String,
}
#
# fn main() {}
```
```compile_fail
# extern crate juniper;
use juniper::{graphql_interface, GraphQLObject};
#[derive(GraphQLObject)]
#[graphql(impl = CharacterValue)]
pub struct ObjA {
id: Vec<String>,
// ^^ the evaluated program panicked at
// 'Failed to implement interface `Character` on `ObjA`: Field `id`: implementor is expected to return a subtype of
// interface's return object: `[String!]!` is not a subtype of `String!`.'
}
#[graphql_interface(for = ObjA)]
struct Character {
id: String,
}
#
# fn main() {}
```
### Ignoring trait methods
We may want some trait methods not to be picked up as [GraphQL interface][1] fields, and so to be ignored.
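As a rough sketch of how this looks (the `Character` trait and its methods below are our own illustration, assuming the `#[graphql(ignore)]` argument is what excludes a method):
```rust
# extern crate juniper;
# use juniper::graphql_interface;
#
#[graphql_interface]
trait Character {
    fn id(&self) -> &str;

    // Not exposed as a GraphQL field, but still callable from Rust code.
    // Ignored methods may also keep a default implementation.
    #[graphql(ignore)]
    fn database_key(&self) -> &str {
        "character"
    }
}
#
# fn main() {}
```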
@ -289,9 +473,9 @@ struct Droid {
[1]: https://spec.graphql.org/June2018/#sec-Interfaces
[1]: https://spec.graphql.org/October2021#sec-Interfaces
[2]: https://doc.rust-lang.org/reference/types/trait-object.html
[3]: https://docs.rs/juniper/latest/juniper/trait.ScalarValue.html
[4]: https://docs.rs/juniper/latest/juniper/struct.Executor.html
[5]: https://spec.graphql.org/June2018/#sec-Objects
[5]: https://spec.graphql.org/October2021#sec-Objects
[6]: https://docs.rs/juniper/0.14.2/juniper/trait.Context.html

View file

@ -157,7 +157,7 @@ They can have custom descriptions and default values.
# extern crate juniper;
# use juniper::graphql_object;
#
struct Person {}
struct Person;
#[graphql_object]
impl Person {
@ -177,7 +177,7 @@ impl Person {
#[graphql(default)]
arg2: i32,
) -> String {
format!("{} {}", arg1, arg2)
format!("{arg1} {arg2}")
}
}
#

View file

@ -60,7 +60,7 @@ there - those errors are automatically converted into `FieldError`.
## Error payloads, `null`, and partial errors
Juniper's error behavior conforms to the [GraphQL specification](https://spec.graphql.org/June2018/#sec-Errors-and-Non-Nullability).
Juniper's error behavior conforms to the [GraphQL specification](https://spec.graphql.org/October2021#sec-Handling-Field-Errors).
When a field returns an error, the field's result is replaced by `null`, an
additional `errors` object is created at the top level of the response, and the
@ -168,7 +168,7 @@ impl Example {
# fn main() {}
```
The specified structured error information is included in the [`extensions`](https://facebook.github.io/graphql/June2018/#sec-Errors) key:
The specified structured error information is included in the [`extensions`](https://spec.graphql.org/October2021#sec-Errors) key:
```json
{
@ -242,15 +242,15 @@ impl Mutation {
if !(10 <= name.len() && name.len() <= 100) {
errors.push(ValidationError {
field: "name".to_string(),
message: "between 10 and 100".to_string()
field: "name".into(),
message: "between 10 and 100".into(),
});
}
if !(1 <= quantity && quantity <= 10) {
errors.push(ValidationError {
field: "quantity".to_string(),
message: "between 1 and 10".to_string()
field: "quantity".into(),
message: "between 1 and 10".into(),
});
}
@ -338,11 +338,11 @@ impl Mutation {
};
if !(10 <= name.len() && name.len() <= 100) {
error.name = Some("between 10 and 100".to_string());
error.name = Some("between 10 and 100".into());
}
if !(1 <= quantity && quantity <= 10) {
error.quantity = Some("between 1 and 10".to_string());
error.quantity = Some("between 1 and 10".into());
}
if error.name.is_none() && error.quantity.is_none() {
@ -436,11 +436,11 @@ impl Mutation {
};
if !(10 <= name.len() && name.len() <= 100) {
error.name = Some("between 10 and 100".to_string());
error.name = Some("between 10 and 100".into());
}
if !(1 <= quantity && quantity <= 10) {
error.quantity = Some("between 1 and 10".to_string());
error.quantity = Some("between 1 and 10".into());
}
if error.name.is_none() && error.quantity.is_none() {

View file

@ -97,6 +97,7 @@ Context cannot be specified by a mutable reference, because concurrent fields re
For example, when using an async runtime with [work stealing][2] (like `tokio`), which additionally requires thread safety, you will need to use a corresponding async version of `RwLock`:
```rust
# extern crate juniper;
# extern crate tokio;
# use std::collections::HashMap;
# use juniper::graphql_object;
use tokio::sync::RwLock;

View file

@ -24,10 +24,10 @@ Juniper has built-in support for:
* `String` and `&str` as `String`
* `bool` as `Boolean`
* `juniper::ID` as `ID`. This type is defined [in the
spec](http://facebook.github.io/graphql/#sec-ID) as a type that is serialized
spec](https://spec.graphql.org/October2021#sec-ID) as a type that is serialized
as a string but can be parsed from both a string and an integer.
Note that there is no built-in support for `i64`/`u64`, as the GraphQL spec [doesn't define any built-in scalars for `i64`/`u64` by default](https://spec.graphql.org/June2018/#sec-Int). You may wish to leverage a [custom GraphQL scalar](#custom-scalars) in your schema to support them.
Note that there is no built-in support for `i64`/`u64`, as the GraphQL spec [doesn't define any built-in scalars for `i64`/`u64` by default](https://spec.graphql.org/October2021#sec-Int). You may wish to leverage a [custom GraphQL scalar](#custom-scalars) in your schema to support them.
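For illustration, here is a minimal sketch of such a custom scalar (the `BigInt` newtype and its string-based wire format are our own choice, not something Juniper ships), using the `#[derive(GraphQLScalar)]` attributes described further down this page:
```rust
# extern crate juniper;
# use juniper::{GraphQLScalar, InputValue, ScalarValue, Value};
#
#[derive(GraphQLScalar)]
#[graphql(
    to_output_with = to_output,
    from_input_with = from_input,
    parse_token(String),
)]
struct BigInt(i64);

fn to_output<S: ScalarValue>(v: &BigInt) -> Value<S> {
    // Transported as a `String`, since the built-in GraphQL `Int` is 32 bit only.
    Value::scalar(v.0.to_string())
}

fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<BigInt, String> {
    v.as_string_value()
        .ok_or_else(|| format!("Expected `String`, found: {v}"))
        .and_then(|s| {
            s.parse()
                .map(BigInt)
                .map_err(|e| format!("Failed to parse `BigInt`: {e}"))
        })
}
#
# fn main() {}
```
Whether to transport 64-bit integers as strings or as some other representation is a schema design choice; the string form shown here is just one common workaround.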
**Third party types**:
@ -114,6 +114,7 @@ All the methods used from newtype's field can be replaced with attributes:
### `#[graphql(to_output_with = <fn>)]` attribute
```rust
# extern crate juniper;
# use juniper::{GraphQLScalar, ScalarValue, Value};
#
#[derive(GraphQLScalar)]
@ -131,6 +132,7 @@ fn to_output<S: ScalarValue>(v: &Incremented) -> Value<S> {
### `#[graphql(from_input_with = <fn>)]` attribute
```rust
# extern crate juniper;
# use juniper::{GraphQLScalar, InputValue, ScalarValue};
#
#[derive(GraphQLScalar)]
@ -145,14 +147,13 @@ impl UserId {
S: ScalarValue
{
input.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", input))
.ok_or_else(|| format!("Expected `String`, found: {input}"))
.and_then(|str| {
str.strip_prefix("id: ")
.ok_or_else(|| {
format!(
"Expected `UserId` to begin with `id: `, \
found: {}",
input,
found: {input}",
)
})
})
@ -166,6 +167,7 @@ impl UserId {
### `#[graphql(parse_token_with = <fn>]` or `#[graphql(parse_token(<types>)]` attributes
```rust
# extern crate juniper;
# use juniper::{
# GraphQLScalar, InputValue, ParseScalarResult, ParseScalarValue,
# ScalarValue, ScalarToken, Value
@ -190,7 +192,7 @@ where
S: ScalarValue
{
match v {
StringOrInt::String(str) => Value::scalar(str.to_owned()),
StringOrInt::String(s) => Value::scalar(s.to_owned()),
StringOrInt::Int(i) => Value::scalar(*i),
}
}
@ -200,15 +202,12 @@ where
S: ScalarValue
{
v.as_string_value()
.map(|s| StringOrInt::String(s.to_owned()))
.map(|s| StringOrInt::String(s.into()))
.or_else(|| v.as_int_value().map(|i| StringOrInt::Int(i)))
.ok_or_else(|| format!("Expected `String` or `Int`, found: {}", v))
.ok_or_else(|| format!("Expected `String` or `Int`, found: {v}"))
}
fn parse_token<S>(value: ScalarToken<'_>) -> ParseScalarResult<'_, S>
where
S: ScalarValue
{
fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<S> {
<String as ParseScalarValue<S>>::from_str(value)
.or_else(|_| <i32 as ParseScalarValue<S>>::from_str(value))
}
@ -228,6 +227,7 @@ Path can be simply `with = Self` (default path where macro expects resolvers to
in case there is an impl block with custom resolvers:
```rust
# extern crate juniper;
# use juniper::{
# GraphQLScalar, InputValue, ParseScalarResult, ParseScalarValue,
# ScalarValue, ScalarToken, Value
@ -243,7 +243,7 @@ enum StringOrInt {
impl StringOrInt {
fn to_output<S: ScalarValue>(&self) -> Value<S> {
match self {
Self::String(str) => Value::scalar(str.to_owned()),
Self::String(s) => Value::scalar(s.to_owned()),
Self::Int(i) => Value::scalar(*i),
}
}
@ -253,12 +253,12 @@ impl StringOrInt {
S: ScalarValue,
{
v.as_string_value()
.map(|s| Self::String(s.to_owned()))
.map(|s| Self::String(s.into()))
.or_else(|| v.as_int_value().map(Self::Int))
.ok_or_else(|| format!("Expected `String` or `Int`, found: {}", v))
.ok_or_else(|| format!("Expected `String` or `Int`, found: {v}"))
}
fn parse_token<S>(value: ScalarToken<'_>) -> ParseScalarResult<'_, S>
fn parse_token<S>(value: ScalarToken<'_>) -> ParseScalarResult<S>
where
S: ScalarValue,
{
@ -273,6 +273,7 @@ impl StringOrInt {
Or it can be a path to a module where the custom resolvers are located.
```rust
# extern crate juniper;
# use juniper::{
# GraphQLScalar, InputValue, ParseScalarResult, ParseScalarValue,
# ScalarValue, ScalarToken, Value
@ -293,7 +294,7 @@ mod string_or_int {
S: ScalarValue,
{
match v {
StringOrInt::String(str) => Value::scalar(str.to_owned()),
StringOrInt::String(s) => Value::scalar(s.to_owned()),
StringOrInt::Int(i) => Value::scalar(*i),
}
}
@ -303,12 +304,12 @@ mod string_or_int {
S: ScalarValue,
{
v.as_string_value()
.map(|s| StringOrInt::String(s.to_owned()))
.map(|s| StringOrInt::String(s.into()))
.or_else(|| v.as_int_value().map(StringOrInt::Int))
.ok_or_else(|| format!("Expected `String` or `Int`, found: {}", v))
.ok_or_else(|| format!("Expected `String` or `Int`, found: {v}"))
}
pub(super) fn parse_token<S>(value: ScalarToken<'_>) -> ParseScalarResult<'_, S>
pub(super) fn parse_token<S>(value: ScalarToken<'_>) -> ParseScalarResult<S>
where
S: ScalarValue,
{
@ -323,6 +324,7 @@ mod string_or_int {
Also, you can partially override the `#[graphql(with)]` attribute with other custom scalars.
```rust
# extern crate juniper;
# use juniper::{GraphQLScalar, InputValue, ParseScalarResult, ScalarValue, ScalarToken, Value};
#
#[derive(GraphQLScalar)]
@ -338,7 +340,7 @@ impl StringOrInt {
S: ScalarValue,
{
match self {
Self::String(str) => Value::scalar(str.to_owned()),
Self::String(s) => Value::scalar(s.to_owned()),
Self::Int(i) => Value::scalar(*i),
}
}
@ -348,9 +350,9 @@ impl StringOrInt {
S: ScalarValue,
{
v.as_string_value()
.map(|s| Self::String(s.to_owned()))
.map(|s| Self::String(s.into()))
.or_else(|| v.as_int_value().map(Self::Int))
.ok_or_else(|| format!("Expected `String` or `Int`, found: {}", v))
.ok_or_else(|| format!("Expected `String` or `Int`, found: {v}"))
}
}
#
@ -403,8 +405,8 @@ mod date_scalar {
pub(super) fn from_input(v: &InputValue<CustomScalarValue>) -> Result<Date, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.and_then(|s| s.parse().map_err(|e| format!("Failed to parse `Date`: {}", e)))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| s.parse().map_err(|e| format!("Failed to parse `Date`: {e}")))
}
}
#

View file

@ -130,7 +130,7 @@ impl Character {
# fn main() {}
```
With an external resolver function we can even declare a new [GraphQL union][1] variant where the Rust type is absent in the initial enum definition. The attribute syntax `#[graphql(on VariantType = resolver_fn)]` follows the [GraphQL syntax for dispatching union variants](https://spec.graphql.org/June2018/#example-f8163).
With an external resolver function we can even declare a new [GraphQL union][1] variant where the Rust type is absent in the initial enum definition. The attribute syntax `#[graphql(on VariantType = resolver_fn)]` follows the [GraphQL syntax for dispatching union variants](https://spec.graphql.org/October2021#example-f8163).
```rust
# #![allow(dead_code)]
@ -485,7 +485,7 @@ enum Character {
[1]: https://spec.graphql.org/June2018/#sec-Unions
[1]: https://spec.graphql.org/October2021#sec-Unions
[2]: https://docs.rs/juniper/latest/juniper/trait.ScalarValue.html
[5]: https://spec.graphql.org/June2018/#sec-Interfaces
[5]: https://spec.graphql.org/October2021#sec-Interfaces
[6]: https://docs.rs/juniper/0.14.2/juniper/trait.Context.html

View file

@ -1,22 +0,0 @@
[package]
name = "juniper_book_tests"
version = "0.0.0"
edition = "2018"
authors = ["Magnus Hallin <mhallin@fastmail.com>"]
publish = false
[dependencies]
derive_more = "0.99"
futures = "0.3"
iron = "0.6"
juniper = { path = "../../juniper" }
juniper_iron = { path = "../../juniper_iron" }
juniper_subscriptions = { path = "../../juniper_subscriptions" }
mount = "0.4"
serde_json = "1.0"
skeptic = "0.13"
tokio = { version = "1.0", features = ["macros", "rt-multi-thread", "sync"] }
uuid = "1.0"
[build-dependencies]
skeptic = "0.13"

View file

@ -1,4 +0,0 @@
fn main() {
let files = skeptic::markdown_files_of_directory("../src/");
skeptic::generate_doc_tests(&files);
}

View file

@ -1,3 +0,0 @@
#![deny(warnings)]
include!(concat!(env!("OUT_DIR"), "/skeptic-tests.rs"));

View file

@ -1,7 +1,8 @@
[package]
name = "example_actix_subscriptions"
version = "0.0.0"
edition = "2018"
edition = "2021"
rust-version = "1.62"
authors = ["Mihai Dinculescu <mihai.dinculescu@outlook.com>"]
publish = false

View file

@ -9,9 +9,9 @@ use actix_web::{
};
use juniper::{
graphql_object, graphql_subscription, graphql_value,
graphql_subscription, graphql_value,
tests::fixtures::starwars::schema::{Database, Query},
EmptyMutation, FieldError, RootNode,
EmptyMutation, FieldError, GraphQLObject, RootNode,
};
use juniper_actix::{graphql_handler, playground_handler, subscriptions::subscriptions_handler};
use juniper_graphql_ws::ConnectionConfig;
@ -37,23 +37,12 @@ async fn graphql(
struct Subscription;
#[derive(GraphQLObject)]
struct RandomHuman {
id: String,
name: String,
}
// TODO: remove this when async interfaces are merged
#[graphql_object(context = Database)]
impl RandomHuman {
fn id(&self) -> &str {
&self.id
}
fn name(&self) -> &str {
&self.name
}
}
type RandomHumanStream =
Pin<Box<dyn futures::Stream<Item = Result<RandomHuman, FieldError>> + Send>>;
@ -84,8 +73,8 @@ impl Subscription {
let human = context.get_human(&random_id).unwrap().clone();
yield Ok(RandomHuman {
id: human.id().to_owned(),
name: human.name().unwrap().to_owned(),
id: human.id().into(),
name: human.name().unwrap().into(),
})
}
}
@ -142,7 +131,7 @@ async fn main() -> std::io::Result<()> {
.finish()
}))
})
.bind(format!("{}:{}", "127.0.0.1", 8080))?
.bind("127.0.0.1:8080")?
.run()
.await
}

View file

@ -1,7 +1,8 @@
[package]
name = "example_basic_subscriptions"
version = "0.0.0"
edition = "2018"
edition = "2021"
rust-version = "1.62"
authors = ["Jordao Rosario <jordao.rosario01@gmail.com>"]
publish = false

View file

@ -16,7 +16,7 @@ impl juniper::Context for Database {}
impl Database {
fn new() -> Self {
Self {}
Self
}
}
@ -45,7 +45,7 @@ impl Subscription {
type Schema = RootNode<'static, Query, EmptyMutation<Database>, Subscription>;
fn schema() -> Schema {
Schema::new(Query {}, EmptyMutation::new(), Subscription {})
Schema::new(Query, EmptyMutation::new(), Subscription)
}
#[tokio::main]

View file

@ -1,7 +1,8 @@
[package]
name = "example_warp_async"
version = "0.0.0"
edition = "2018"
edition = "2021"
rust-version = "1.62"
authors = ["Christoph Herzog <chris@theduke.at>"]
publish = false

View file

@ -1,13 +1,14 @@
[package]
name = "example_warp_subscriptions"
version = "0.0.0"
edition = "2018"
edition = "2021"
rust-version = "1.62"
publish = false
[dependencies]
async-stream = "0.3"
env_logger = "0.9"
futures = "0.3.1"
futures = "0.3"
juniper = { path = "../../juniper" }
juniper_graphql_ws = { path = "../../juniper_graphql_ws" }
juniper_warp = { path = "../../juniper_warp", features = ["subscriptions"] }

View file

@ -12,7 +12,7 @@ use juniper_warp::{playground_filter, subscriptions::serve_graphql_ws};
use warp::{http::Response, Filter};
#[derive(Clone)]
struct Context {}
struct Context;
impl juniper::Context for Context {}
@ -46,7 +46,7 @@ impl User {
async fn friends(&self) -> Vec<User> {
if self.id == 1 {
return vec![
vec![
User {
id: 11,
kind: UserKind::User,
@ -62,15 +62,15 @@ impl User {
kind: UserKind::Guest,
name: "user13".into(),
},
];
]
} else if self.id == 2 {
return vec![User {
vec![User {
id: 21,
kind: UserKind::User,
name: "user21".into(),
}];
}]
} else if self.id == 3 {
return vec![
vec![
User {
id: 31,
kind: UserKind::User,
@ -81,9 +81,9 @@ impl User {
kind: UserKind::Guest,
name: "user32".into(),
},
];
]
} else {
return vec![];
vec![]
}
}
}
@ -123,7 +123,7 @@ impl Subscription {
yield Ok(User {
id: counter,
kind: UserKind::Admin,
name: "stream user".to_string(),
name: "stream user".into(),
})
}
}
@ -149,11 +149,11 @@ async fn main() {
let homepage = warp::path::end().map(|| {
Response::builder()
.header("content-type", "text/html")
.body("<html><h1>juniper_subscriptions demo</h1><div>visit <a href=\"/playground\">graphql playground</a></html>".to_string())
.body("<html><h1>juniper_subscriptions demo</h1><div>visit <a href=\"/playground\">graphql playground</a></html>")
});
let qm_schema = schema();
let qm_state = warp::any().map(move || Context {});
let qm_state = warp::any().map(|| Context);
let qm_graphql_filter = juniper_warp::make_graphql_filter(qm_schema, qm_state.boxed());
let root_node = Arc::new(schema());
@ -165,10 +165,10 @@ async fn main() {
.map(move |ws: warp::ws::Ws| {
let root_node = root_node.clone();
ws.on_upgrade(move |websocket| async move {
serve_graphql_ws(websocket, root_node, ConnectionConfig::new(Context {}))
serve_graphql_ws(websocket, root_node, ConnectionConfig::new(Context))
.map(|r| {
if let Err(e) = r {
println!("Websocket error: {}", e);
println!("Websocket error: {e}");
}
})
.await

View file

@ -27,10 +27,11 @@ All user visible changes to `juniper` crate will be documented in this file. Thi
- Removed support for `dyn` attribute argument (interface values as trait objects).
- Removed support for `downcast` attribute argument (custom resolution into implementer types).
- Removed support for `async` trait methods (not required anymore).
- Removed necessity of writing `impl Trait for Type` blocks (interfaces are implemented just by matching their fields now).
- Removed necessity of writing `impl Trait for Type` blocks (interfaces are implemented just by matching their fields now). ([#113])
- Forbade default implementations of non-ignored trait methods.
- Supported coercion of additional `null`able arguments and return sub-typing on implementer.
- Supported `rename_all = "<policy>"` attribute argument influencing all its fields and their arguments. ([#971])
- Supported interfaces implementing other interfaces. ([#1028])
- Split `#[derive(GraphQLScalarValue)]` macro into:
- `#[derive(GraphQLScalar)]` for implementing GraphQL scalar: ([#1017])
- Supported generic `ScalarValue`.
@ -47,6 +48,7 @@ All user visible changes to `juniper` crate will be documented in this file. Thi
- Reworked [`chrono` crate] integration GraphQL scalars according to [graphql-scalars.dev] specs: ([#1010])
- Disabled `chrono` [Cargo feature] by default.
- Removed `scalar-naivetime` [Cargo feature].
- Removed lifetime parameter from `ParseError`, `GraphQLError`, `GraphQLBatchRequest` and `GraphQLRequest`. ([#1081], [#528])
### Added
@ -70,8 +72,13 @@ All user visible changes to `juniper` crate will be documented in this file. Thi
- Unsupported expressions in `graphql_value!` macro. ([#996], [#503])
- Incorrect GraphQL list coercion rules: `null` cannot be coerced to an `[Int!]!` or `[Int]!`. ([#1004])
- All procedural macros expansion inside `macro_rules!`. ([#1054], [#1051])
- Incorrect input value coercion with defaults. ([#1080], [#1073])
- Incorrect error when explicit `null` provided for `null`able list input parameter. ([#1086], [#1085])
- Stack overflow on nested GraphQL fragments. ([CVE-2022-31173])
[#113]: /../../issues/113
[#503]: /../../issues/503
[#528]: /../../issues/528
[#750]: /../../issues/750
[#798]: /../../issues/798
[#918]: /../../issues/918
@ -94,11 +101,18 @@ All user visible changes to `juniper` crate will be documented in this file. Thi
[#1017]: /../../pull/1017
[#1025]: /../../pull/1025
[#1026]: /../../pull/1026
[#1028]: /../../pull/1028
[#1051]: /../../issues/1051
[#1054]: /../../pull/1054
[#1057]: /../../pull/1057
[#1060]: /../../pull/1060
[#1073]: /../../issues/1073
[#1080]: /../../pull/1080
[#1081]: /../../pull/1081
[#1085]: /../../issues/1085
[#1086]: /../../pull/1086
[ba1ed85b]: /../../commit/ba1ed85b3c3dd77fbae7baf6bc4e693321a94083
[CVE-2022-31173]: /../../security/advisories/GHSA-4rx6-g5vg-5f3j

View file

@ -1,7 +1,8 @@
[package]
name = "juniper"
version = "0.16.0-dev"
edition = "2018"
edition = "2021"
rust-version = "1.62"
description = "GraphQL server library."
license = "BSD-2-Clause"
authors = [
@ -38,11 +39,11 @@ schema-language = ["graphql-parser"]
anyhow = { version = "1.0.32", default-features = false, optional = true }
async-trait = "0.1.39"
bigdecimal = { version = "0.3", optional = true }
bson = { version = "2.0", features = ["chrono-0_4"], optional = true }
chrono = { version = "0.4", features = ["alloc"], default-features = false, optional = true }
bson = { version = "2.4", features = ["chrono-0_4"], optional = true }
chrono = { version = "0.4.20", features = ["alloc"], default-features = false, optional = true }
chrono-tz = { version = "0.6", default-features = false, optional = true }
fnv = "1.0.3"
futures = { version = "0.3.1", features = ["alloc"], default-features = false }
futures = { version = "0.3.22", features = ["alloc"], default-features = false }
futures-enum = { version = "0.1.12", default-features = false }
graphql-parser = { version = "0.4", optional = true }
indexmap = { version = "1.0", features = ["serde-1"] }
@ -58,12 +59,10 @@ uuid = { version = "1.0", default-features = false, optional = true }
[target.'cfg(target_arch = "wasm32")'.dependencies]
getrandom = { version = "0.2", features = ["js"] }
# not used, to fix `bson` compilation only
uuid_08 = { version = "0.8", package = "uuid", default-features = false, features = ["wasm-bindgen"] }
[dev-dependencies]
bencher = "0.1.2"
chrono = { version = "0.4", features = ["alloc"], default-features = false }
chrono = { version = "0.4.20", features = ["alloc"], default-features = false }
pretty_assertions = "1.0.0"
serde_json = "1.0.2"
tokio = { version = "1.0", features = ["macros", "time", "rt-multi-thread"] }

View file

@ -114,7 +114,7 @@ pub struct Directive<'a, S> {
}
#[allow(missing_docs)]
#[derive(Clone, PartialEq, Debug)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum OperationType {
Query,
Mutation,
@ -224,10 +224,10 @@ impl<'a> Type<'a> {
impl<'a> fmt::Display for Type<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Named(n) => write!(f, "{}", n),
Self::NonNullNamed(n) => write!(f, "{}!", n),
Self::List(t, _) => write!(f, "[{}]", t),
Self::NonNullList(t, _) => write!(f, "[{}]!", t),
Self::Named(n) => write!(f, "{n}"),
Self::NonNullNamed(n) => write!(f, "{n}!"),
Self::List(t, _) => write!(f, "[{t}]"),
Self::NonNullList(t, _) => write!(f, "[{t}]!"),
}
}
}
@ -248,12 +248,12 @@ impl<S> InputValue<S> {
/// Construct an enum value.
pub fn enum_value<T: AsRef<str>>(s: T) -> Self {
Self::Enum(s.as_ref().to_owned())
Self::Enum(s.as_ref().into())
}
/// Construct a variable value.
pub fn variable<T: AsRef<str>>(v: T) -> Self {
Self::Variable(v.as_ref().to_owned())
Self::Variable(v.as_ref().into())
}
/// Construct a [`Spanning::unlocated`] list.
@ -282,7 +282,7 @@ impl<S> InputValue<S> {
o.into_iter()
.map(|(k, v)| {
(
Spanning::unlocated(k.as_ref().to_owned()),
Spanning::unlocated(k.as_ref().into()),
Spanning::unlocated(v),
)
})
@ -295,25 +295,36 @@ impl<S> InputValue<S> {
Self::Object(o)
}
/// Resolve all variables to their values.
/// Resolves all variables of this [`InputValue`] to their actual `values`.
///
/// If a variable is not present in the `values`:
/// - Returns [`None`] in case this is an [`InputValue::Variable`].
/// - Skips field in case of an [`InputValue::Object`] field.
/// - Replaces with an [`InputValue::Null`] in case of an
/// [`InputValue::List`] element.
///
/// This is done because for an [`InputValue::Variable`] (or an
/// [`InputValue::Object`] field) a default value can be used later, if it's
/// provided, whereas a single [`InputValue::List`] element cannot have a
/// default value.
#[must_use]
pub fn into_const(self, vars: &Variables<S>) -> Self
pub fn into_const(self, values: &Variables<S>) -> Option<Self>
where
S: Clone,
{
match self {
Self::Variable(v) => vars.get(&v).map_or_else(InputValue::null, Clone::clone),
Self::List(l) => Self::List(
Self::Variable(v) => values.get(&v).cloned(),
Self::List(l) => Some(Self::List(
l.into_iter()
.map(|s| s.map(|v| v.into_const(vars)))
.map(|s| s.map(|v| v.into_const(values).unwrap_or_else(Self::null)))
.collect(),
),
Self::Object(o) => Self::Object(
)),
Self::Object(o) => Some(Self::Object(
o.into_iter()
.map(|(sk, sv)| (sk, sv.map(|v| v.into_const(vars))))
.filter_map(|(sk, sv)| sv.and_then(|v| v.into_const(values)).map(|sv| (sk, sv)))
.collect(),
),
v => v,
)),
v => Some(v),
}
}
@ -456,13 +467,13 @@ impl<S: ScalarValue> fmt::Display for InputValue<S> {
Self::Null => write!(f, "null"),
Self::Scalar(s) => {
if let Some(s) = s.as_str() {
write!(f, "\"{}\"", s)
write!(f, "\"{s}\"")
} else {
write!(f, "{}", s)
write!(f, "{s}")
}
}
Self::Enum(v) => write!(f, "{}", v),
Self::Variable(v) => write!(f, "${}", v),
Self::Enum(v) => write!(f, "{v}"),
Self::Variable(v) => write!(f, "${v}"),
Self::List(v) => {
write!(f, "[")?;
for (i, spanning) in v.iter().enumerate() {
@ -577,30 +588,30 @@ mod tests {
#[test]
fn test_input_value_fmt() {
let value: InputValue = graphql_input_value!(null);
assert_eq!(format!("{}", value), "null");
assert_eq!(value.to_string(), "null");
let value: InputValue = graphql_input_value!(123);
assert_eq!(format!("{}", value), "123");
assert_eq!(value.to_string(), "123");
let value: InputValue = graphql_input_value!(12.3);
assert_eq!(format!("{}", value), "12.3");
assert_eq!(value.to_string(), "12.3");
let value: InputValue = graphql_input_value!("FOO");
assert_eq!(format!("{}", value), "\"FOO\"");
assert_eq!(value.to_string(), "\"FOO\"");
let value: InputValue = graphql_input_value!(true);
assert_eq!(format!("{}", value), "true");
assert_eq!(value.to_string(), "true");
let value: InputValue = graphql_input_value!(BAR);
assert_eq!(format!("{}", value), "BAR");
assert_eq!(value.to_string(), "BAR");
let value: InputValue = graphql_input_value!(@baz);
assert_eq!(format!("{}", value), "$baz");
assert_eq!(value.to_string(), "$baz");
let value: InputValue = graphql_input_value!([1, 2]);
assert_eq!(format!("{}", value), "[1, 2]");
assert_eq!(value.to_string(), "[1, 2]");
let value: InputValue = graphql_input_value!({"foo": 1,"bar": 2});
assert_eq!(format!("{}", value), "{foo: 1, bar: 2}");
assert_eq!(value.to_string(), "{foo: 1, bar: 2}");
}
}

View file

@ -10,7 +10,7 @@ use super::Variables;
/// An enum that describes if a field is available in all types of the interface
/// or only in a certain subtype
#[derive(Debug, Clone, PartialEq)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Applies<'a> {
/// The field is available independent from the type
All,
@ -112,12 +112,11 @@ pub struct LookAheadSelection<'a, S: 'a> {
pub(super) children: Vec<ChildSelection<'a, S>>,
}
impl<'a, S> Default for LookAheadSelection<'a, S>
where
S: ScalarValue,
{
// Implemented manually to omit redundant `S: Default` trait bound, imposed by
// `#[derive(Default)]`.
impl<'a, S: 'a> Default for LookAheadSelection<'a, S> {
fn default() -> Self {
LookAheadSelection {
Self {
name: "",
alias: None,
arguments: vec![],

View file

@ -304,7 +304,7 @@ where
type Type;
#[doc(hidden)]
fn into(self, ctx: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S>;
fn into_resolvable(self, ctx: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S>;
}
impl<'a, S, T, C> IntoResolvable<'a, S, T, C> for T
@ -315,7 +315,7 @@ where
{
type Type = T;
fn into(self, ctx: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S> {
fn into_resolvable(self, ctx: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S> {
Ok(Some((FromContext::from(ctx), self)))
}
}
@ -328,7 +328,7 @@ where
{
type Type = T;
fn into(self, ctx: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S> {
fn into_resolvable(self, ctx: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S> {
self.map(|v: T| Some((<T::Context as FromContext<C>>::from(ctx), v)))
.map_err(IntoFieldError::into_field_error)
}
@ -341,7 +341,7 @@ where
{
type Type = T;
fn into(self, _: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S> {
fn into_resolvable(self, _: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S> {
Ok(Some(self))
}
}
@ -354,7 +354,7 @@ where
type Type = T;
#[allow(clippy::type_complexity)]
fn into(self, _: &'a C) -> FieldResult<Option<(&'a T::Context, Option<T>)>, S> {
fn into_resolvable(self, _: &'a C) -> FieldResult<Option<(&'a T::Context, Option<T>)>, S> {
Ok(self.map(|(ctx, v)| (ctx, Some(v))))
}
}
@ -367,7 +367,7 @@ where
{
type Type = T;
fn into(self, _: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S2> {
fn into_resolvable(self, _: &'a C) -> FieldResult<Option<(&'a T::Context, T)>, S2> {
self.map(Some).map_err(FieldError::map_scalar_value)
}
}
@ -382,7 +382,7 @@ where
type Type = T;
#[allow(clippy::type_complexity)]
fn into(self, _: &'a C) -> FieldResult<Option<(&'a T::Context, Option<T>)>, S2> {
fn into_resolvable(self, _: &'a C) -> FieldResult<Option<(&'a T::Context, Option<T>)>, S2> {
self.map(|o| o.map(|(ctx, v)| (ctx, Some(v))))
.map_err(FieldError::map_scalar_value)
}
@ -774,7 +774,7 @@ impl<'a> FieldPath<'a> {
FieldPath::Root(_) => (),
FieldPath::Field(name, _, parent) => {
parent.construct_path(acc);
acc.push((*name).to_owned());
acc.push((*name).into());
}
}
}
@ -791,7 +791,7 @@ impl<S> ExecutionError<S> {
pub fn new(location: SourcePosition, path: &[&str], error: FieldError<S>) -> ExecutionError<S> {
ExecutionError {
location,
path: path.iter().map(|s| (*s).to_owned()).collect(),
path: path.iter().map(|s| (*s).into()).collect(),
error,
}
}
@ -814,13 +814,13 @@ impl<S> ExecutionError<S> {
/// Create new `Executor` and start query/mutation execution.
/// Returns `IsSubscription` error if subscription is passed.
pub fn execute_validated_query<'a, 'b, QueryT, MutationT, SubscriptionT, S>(
pub fn execute_validated_query<'b, QueryT, MutationT, SubscriptionT, S>(
document: &'b Document<S>,
operation: &'b Spanning<Operation<S>>,
root_node: &RootNode<QueryT, MutationT, SubscriptionT, S>,
variables: &Variables<S>,
context: &QueryT::Context,
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError<'a>>
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError>
where
S: ScalarValue,
QueryT: GraphQLType<S>,
@ -842,10 +842,10 @@ where
defs.item
.items
.iter()
.filter_map(|&(ref name, ref def)| {
.filter_map(|(name, def)| {
def.default_value
.as_ref()
.map(|i| (name.item.to_owned(), i.item.clone()))
.map(|i| (name.item.into(), i.item.clone()))
})
.collect::<HashMap<String, InputValue<S>>>()
});
@ -914,7 +914,7 @@ pub async fn execute_validated_query_async<'a, 'b, QueryT, MutationT, Subscripti
root_node: &RootNode<'a, QueryT, MutationT, SubscriptionT, S>,
variables: &Variables<S>,
context: &QueryT::Context,
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError<'a>>
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError>
where
QueryT: GraphQLTypeAsync<S>,
QueryT::TypeInfo: Sync,
@ -943,7 +943,7 @@ where
.filter_map(|&(ref name, ref def)| {
def.default_value
.as_ref()
.map(|i| (name.item.to_owned(), i.item.clone()))
.map(|i| (name.item.into(), i.item.clone()))
})
.collect::<HashMap<String, InputValue<S>>>()
});
@ -1011,10 +1011,10 @@ where
}
#[doc(hidden)]
pub fn get_operation<'b, 'd, 'e, S>(
pub fn get_operation<'b, 'd, S>(
document: &'b Document<'d, S>,
operation_name: Option<&str>,
) -> Result<&'b Spanning<Operation<'d, S>>, GraphQLError<'e>>
) -> Result<&'b Spanning<Operation<'d, S>>, GraphQLError>
where
S: ScalarValue,
{
@ -1058,7 +1058,7 @@ pub async fn resolve_validated_subscription<
root_node: &'r RootNode<'r, QueryT, MutationT, SubscriptionT, S>,
variables: &Variables<S>,
context: &'r QueryT::Context,
) -> Result<(Value<ValuesStream<'r, S>>, Vec<ExecutionError<S>>), GraphQLError<'r>>
) -> Result<(Value<ValuesStream<'r, S>>, Vec<ExecutionError<S>>), GraphQLError>
where
'r: 'exec_ref,
'd: 'r,
@ -1090,7 +1090,7 @@ where
.filter_map(|&(ref name, ref def)| {
def.default_value
.as_ref()
.map(|i| (name.item.to_owned(), i.item.clone()))
.map(|i| (name.item.into(), i.item.clone()))
})
.collect::<HashMap<String, InputValue<S>>>()
});
@ -1172,7 +1172,7 @@ impl<'r, S: 'r> Registry<'r, S> {
if !self.types.contains_key(name) {
self.insert_placeholder(
validated_name.clone(),
Type::NonNullNamed(Cow::Owned(name.to_string())),
Type::NonNullNamed(Cow::Owned(name.into())),
);
let meta = T::meta(info, self);
self.types.insert(validated_name, meta);
@ -1209,7 +1209,7 @@ impl<'r, S: 'r> Registry<'r, S> {
S: ScalarValue,
{
Field {
name: smartstring::SmartString::from(name),
name: name.into(),
description: None,
arguments: None,
field_type: self.get_type::<I>(info),
@ -1227,9 +1227,6 @@ impl<'r, S: 'r> Registry<'r, S> {
}
/// Creates an [`Argument`] with the provided default `value`.
///
/// When called with type `T`, the actual [`Argument`] will be given the
/// type `Option<T>`.
pub fn arg_with_default<T>(
&mut self,
name: &str,
@ -1240,7 +1237,7 @@ impl<'r, S: 'r> Registry<'r, S> {
T: GraphQLType<S> + ToInputValue<S> + FromInputValue<S>,
S: ScalarValue,
{
Argument::new(name, self.get_type::<Option<T>>(info)).default_value(value.to_input_value())
Argument::new(name, self.get_type::<T>(info)).default_value(value.to_input_value())
}
fn insert_placeholder(&mut self, name: Name, of_type: Type<'r>) {
@ -1258,7 +1255,7 @@ impl<'r, S: 'r> Registry<'r, S> {
{
let name = T::name(info).expect("Scalar types must be named. Implement `name()`");
ScalarMeta::new::<T>(Cow::Owned(name.to_string()))
ScalarMeta::new::<T>(Cow::Owned(name.into()))
}
/// Creates a [`ListMeta`] type.
@ -1302,7 +1299,7 @@ impl<'r, S: 'r> Registry<'r, S> {
let mut v = fields.to_vec();
v.push(self.field::<String>("__typename", &()));
ObjectMeta::new(Cow::Owned(name.to_string()), &v)
ObjectMeta::new(Cow::Owned(name.into()), &v)
}
/// Creates an [`EnumMeta`] type out of the provided `values`.
@ -1318,7 +1315,7 @@ impl<'r, S: 'r> Registry<'r, S> {
{
let name = T::name(info).expect("Enum types must be named. Implement `name()`");
EnumMeta::new::<T>(Cow::Owned(name.to_string()), values)
EnumMeta::new::<T>(Cow::Owned(name.into()), values)
}
/// Creates an [`InterfaceMeta`] type with the given `fields`.
@ -1335,7 +1332,7 @@ impl<'r, S: 'r> Registry<'r, S> {
let mut v = fields.to_vec();
v.push(self.field::<String>("__typename", &()));
InterfaceMeta::new(Cow::Owned(name.to_string()), &v)
InterfaceMeta::new(Cow::Owned(name.into()), &v)
}
/// Creates an [`UnionMeta`] type of the given `types`.
@ -1346,7 +1343,7 @@ impl<'r, S: 'r> Registry<'r, S> {
{
let name = T::name(info).expect("Union types must be named. Implement name()");
UnionMeta::new(Cow::Owned(name.to_string()), types)
UnionMeta::new(Cow::Owned(name.into()), types)
}
/// Creates an [`InputObjectMeta`] type with the given `args`.
@ -1362,6 +1359,6 @@ impl<'r, S: 'r> Registry<'r, S> {
{
let name = T::name(info).expect("Input object types must be named. Implement name()");
InputObjectMeta::new::<T>(Cow::Owned(name.to_string()), args)
InputObjectMeta::new::<T>(Cow::Owned(name.into()), args)
}
}

View file

@ -1,5 +1,6 @@
use crate::{
graphql_object, graphql_value, EmptyMutation, EmptySubscription, GraphQLEnum, RootNode, Value,
graphql_object, graphql_value, graphql_vars, EmptyMutation, EmptySubscription, GraphQLEnum,
RootNode, Value,
};
#[derive(GraphQLEnum)]
@ -30,7 +31,7 @@ impl User {
(0..10)
.map(|index| User {
id: index,
name: format!("user{}", index),
name: format!("user{index}"),
kind: UserKind::User,
})
.collect()
@ -55,7 +56,7 @@ impl Query {
}
async fn field_async_plain() -> String {
"field_async_plain".to_string()
"field_async_plain".into()
}
fn user(id: String) -> User {
@ -86,8 +87,7 @@ async fn async_simple() {
}
"#;
let vars = Default::default();
let (res, errs) = crate::execute(doc, None, &schema, &vars, &())
let (res, errs) = crate::execute(doc, None, &schema, &graphql_vars! {}, &())
.await
.unwrap();

View file

@ -35,7 +35,7 @@ where
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
let obj = result.as_object_value().expect("Result is not an object");

View file

@ -22,7 +22,7 @@ struct TestType;
#[crate::graphql_object]
impl TestType {
fn to_string(color: Color) -> String {
format!("Color::{:?}", color)
format!("Color::{color:?}")
}
fn a_color() -> Color {
@ -46,7 +46,7 @@ where
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
let obj = result.as_object_value().expect("Result is not an object");

View file

@ -96,7 +96,7 @@ mod field_execution {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,
@ -180,7 +180,7 @@ mod merge_parallel_fragments {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,
@ -288,7 +288,7 @@ mod merge_parallel_inline_fragments {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,
@ -355,7 +355,7 @@ mod threads_context_correctly {
&schema,
&vars,
&TestContext {
value: "Context value".to_owned(),
value: "Context value".into(),
},
)
.await
@ -363,7 +363,7 @@ mod threads_context_correctly {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(result, graphql_value!({"a": "Context value"}));
}
@ -410,7 +410,7 @@ mod dynamic_context_switching {
let res = context
.items
.get(&key)
.ok_or(format!("Could not find key {}", key))
.ok_or(format!("Could not find key {key}"))
.map(|c| (c, ItemRef))?;
Ok(res)
}
@ -420,7 +420,7 @@ mod dynamic_context_switching {
key: i32,
) -> FieldResult<Option<(&InnerContext, ItemRef)>> {
if key > 100 {
Err(format!("Key too large: {}", key))?;
Err(format!("Key too large: {key}"))?;
}
Ok(context.items.get(&key).map(|c| (c, ItemRef)))
}
@ -452,13 +452,13 @@ mod dynamic_context_switching {
(
0,
InnerContext {
value: "First value".to_owned(),
value: "First value".into(),
},
),
(
1,
InnerContext {
value: "Second value".to_owned(),
value: "Second value".into(),
},
),
]
@ -472,7 +472,7 @@ mod dynamic_context_switching {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,
@ -500,13 +500,13 @@ mod dynamic_context_switching {
(
0,
InnerContext {
value: "First value".to_owned(),
value: "First value".into(),
},
),
(
1,
InnerContext {
value: "Second value".to_owned(),
value: "Second value".into(),
},
),
]
@ -520,7 +520,7 @@ mod dynamic_context_switching {
assert_eq!(errs, vec![]);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(result, graphql_value!({"first": {"value": "First value"}}));
}
@ -542,13 +542,13 @@ mod dynamic_context_switching {
(
0,
InnerContext {
value: "First value".to_owned(),
value: "First value".into(),
},
),
(
1,
InnerContext {
value: "Second value".to_owned(),
value: "Second value".into(),
},
),
]
@ -569,7 +569,7 @@ mod dynamic_context_switching {
)],
);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(result, graphql_value!(null));
}
@ -593,13 +593,13 @@ mod dynamic_context_switching {
(
0,
InnerContext {
value: "First value".to_owned(),
value: "First value".into(),
},
),
(
1,
InnerContext {
value: "Second value".to_owned(),
value: "Second value".into(),
},
),
]
@ -620,7 +620,7 @@ mod dynamic_context_switching {
)],
);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,
@ -647,13 +647,13 @@ mod dynamic_context_switching {
(
0,
InnerContext {
value: "First value".to_owned(),
value: "First value".into(),
},
),
(
1,
InnerContext {
value: "Second value".to_owned(),
value: "Second value".into(),
},
),
]
@ -667,7 +667,7 @@ mod dynamic_context_switching {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(result, graphql_value!({"first": {"value": "First value"}}));
}
@ -752,7 +752,7 @@ mod propagates_errors_to_nullable_fields {
.await
.expect("Execution failed");
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,
@ -783,7 +783,7 @@ mod propagates_errors_to_nullable_fields {
.await
.expect("Execution failed");
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(result, graphql_value!(null));
@ -811,7 +811,7 @@ mod propagates_errors_to_nullable_fields {
.await
.expect("Execution failed");
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(result, graphql_value!(null));
@ -839,7 +839,7 @@ mod propagates_errors_to_nullable_fields {
.await
.expect("Execution failed");
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(result, graphql_value!({"inner": {"nullableField": null}}),);
@ -867,7 +867,7 @@ mod propagates_errors_to_nullable_fields {
.await
.expect("Execution failed");
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(result, graphql_value!(null));
@ -895,7 +895,7 @@ mod propagates_errors_to_nullable_fields {
.await
.expect("Execution failed");
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,
@ -926,7 +926,7 @@ mod propagates_errors_to_nullable_fields {
.await
.expect("Execution failed");
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(result, graphql_value!(null));
@ -954,7 +954,7 @@ mod propagates_errors_to_nullable_fields {
.await
.expect("Execution failed");
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,

View file

@ -42,12 +42,12 @@ mod interface {
Schema {
pets: vec![
Dog {
name: "Odie".to_owned(),
name: "Odie".into(),
woofs: true,
}
.into(),
Cat {
name: "Garfield".to_owned(),
name: "Garfield".into(),
meows: false,
}
.into(),
@ -77,7 +77,7 @@ mod interface {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,
@ -170,11 +170,11 @@ mod union {
Schema {
pets: vec![
Box::new(Dog {
name: "Odie".to_owned(),
name: "Odie".into(),
woofs: true,
}),
Box::new(Cat {
name: "Garfield".to_owned(),
name: "Garfield".into(),
meows: false,
}),
],
@ -205,7 +205,7 @@ mod union {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,

View file

@ -92,7 +92,7 @@ where
F: Fn((&Object<DefaultScalarValue>, &Vec<Value<DefaultScalarValue>>)) -> (),
{
let schema = RootNode::new(
Root {},
Root,
EmptyMutation::<()>::new(),
EmptySubscription::<()>::new(),
);
@ -103,7 +103,7 @@ where
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
let type_info = result
.as_object_value()

View file

@ -76,9 +76,9 @@ struct FieldDescription {
#[derive(GraphQLInputObject, Debug)]
struct FieldWithDefaults {
#[graphql(default = "123")]
#[graphql(default = 123)]
field_one: i32,
#[graphql(default = "456", description = "The second field")]
#[graphql(default = 456, description = "The second field")]
field_two: i32,
}
@ -117,7 +117,7 @@ where
F: Fn(&Object<DefaultScalarValue>, &Vec<Value<DefaultScalarValue>>) -> (),
{
let schema = RootNode::new(
Root {},
Root,
EmptyMutation::<()>::new(),
EmptySubscription::<()>::new(),
);
@ -128,7 +128,7 @@ where
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
let type_info = result
.as_object_value()
@ -312,7 +312,7 @@ fn derive_derived() {
format!(
"{:?}",
Derive {
field_one: "test".to_owned(),
field_one: "test".into(),
},
),
"Derive { field_one: \"test\" }"
@ -462,6 +462,9 @@ async fn field_with_defaults_introspection() {
name
type {
name
ofType {
name
}
}
defaultValue
}
@ -477,12 +480,12 @@ async fn field_with_defaults_introspection() {
assert_eq!(fields.len(), 2);
assert!(fields.contains(&graphql_value!({
"name": "fieldOne",
"type": {"name": "Int"},
"type": {"name": null, "ofType": {"name": "Int"}},
"defaultValue": "123",
})));
assert!(fields.contains(&graphql_value!({
"name": "fieldTwo",
"type": {"name": "Int"},
"type": {"name": null, "ofType": {"name": "Int"}},
"defaultValue": "456",
})));
})

View file

@ -69,7 +69,7 @@ async fn test_execution() {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
assert_eq!(
result,
@ -114,7 +114,7 @@ async fn enum_introspection() {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
let type_info = result
.as_object_value()
@ -223,7 +223,7 @@ async fn interface_introspection() {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
let type_info = result
.as_object_value()
@ -247,7 +247,7 @@ async fn interface_introspection() {
);
assert_eq!(
type_info.get_field_value("interfaces"),
Some(&graphql_value!(null)),
Some(&graphql_value!([])),
);
assert_eq!(
type_info.get_field_value("enumValues"),
@ -355,7 +355,7 @@ async fn object_introspection() {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
let type_info = result
.as_object_value()
@ -406,7 +406,7 @@ async fn object_introspection() {
assert_eq!(fields.len(), 2);
println!("Fields: {:#?}", fields);
println!("Fields: {fields:#?}");
assert!(fields.contains(&graphql_value!({
"name": "sampleEnum",
@ -444,9 +444,13 @@ async fn object_introspection() {
"name": "second",
"description": "The second number",
"type": {
"name": "Int",
"kind": "SCALAR",
"ofType": null,
"name": null,
"kind": "NON_NULL",
"ofType": {
"name": "Int",
"kind": "SCALAR",
"ofType": null,
},
},
"defaultValue": "123",
}],
@ -493,7 +497,7 @@ async fn scalar_introspection() {
assert_eq!(errs, []);
println!("Result: {:#?}", result);
println!("Result: {result:#?}");
let type_info = result
.as_object_value()

View file

@ -23,7 +23,7 @@ impl TestComplexScalar {
v.as_string_value()
.filter(|s| *s == "SerializedValue")
.map(|_| Self)
.ok_or_else(|| format!(r#"Expected "SerializedValue" string, found: {}"#, v))
.ok_or_else(|| format!(r#"Expected "SerializedValue" string, found: {v}"#))
}
}
@ -49,7 +49,7 @@ struct ExampleInputObject {
#[derive(GraphQLInputObject, Debug)]
struct InputWithDefaults {
#[graphql(default = "123")]
#[graphql(default = 123)]
a: i32,
}
@ -58,41 +58,47 @@ struct TestType;
#[graphql_object]
impl TestType {
fn field_with_object_input(input: Option<TestInputObject>) -> String {
format!("{:?}", input)
format!("{input:?}")
}
fn field_with_nullable_string_input(input: Option<String>) -> String {
format!("{:?}", input)
format!("{input:?}")
}
fn field_with_non_nullable_string_input(input: String) -> String {
format!("{:?}", input)
format!("{input:?}")
}
fn field_with_default_argument_value(
#[graphql(default = "Hello World")] input: String,
) -> String {
format!("{:?}", input)
format!("{input:?}")
}
fn nullable_field_with_default_argument_value(
#[graphql(default = "Hello World".to_owned())] input: Option<String>,
) -> String {
format!("{input:?}")
}
fn field_with_nested_object_input(input: Option<TestNestedInputObject>) -> String {
format!("{:?}", input)
format!("{input:?}")
}
fn list(input: Option<Vec<Option<String>>>) -> String {
format!("{:?}", input)
format!("{input:?}")
}
fn nn_list(input: Vec<Option<String>>) -> String {
format!("{:?}", input)
format!("{input:?}")
}
fn list_nn(input: Option<Vec<String>>) -> String {
format!("{:?}", input)
format!("{input:?}")
}
fn nn_list_nn(input: Vec<String>) -> String {
format!("{:?}", input)
format!("{input:?}")
}
fn example_input(arg: ExampleInputObject) -> String {
@ -104,11 +110,11 @@ impl TestType {
}
fn integer_input(value: i32) -> String {
format!("value: {}", value)
format!("value: {value}")
}
fn float_input(value: f64) -> String {
format!("value: {}", value)
format!("value: {value}")
}
}
@ -128,7 +134,7 @@ where
assert_eq!(errs, []);
println!("Result: {:?}", result);
println!("Result: {result:?}");
let obj = result.as_object_value().expect("Result is not an object");
@ -791,13 +797,14 @@ async fn default_argument_when_not_provided() {
}
#[tokio::test]
async fn default_argument_when_nullable_variable_not_provided() {
run_query(
r#"query q($input: String) { fieldWithDefaultArgumentValue(input: $input) }"#,
async fn provided_variable_overwrites_default_value() {
run_variable_query(
r#"query q($input: String!) { fieldWithDefaultArgumentValue(input: $input) }"#,
graphql_vars! {"input": "Overwritten"},
|result| {
assert_eq!(
result.get_field_value("fieldWithDefaultArgumentValue"),
Some(&graphql_value!(r#""Hello World""#)),
Some(&graphql_value!(r#""Overwritten""#)),
);
},
)
@ -805,14 +812,28 @@ async fn default_argument_when_nullable_variable_not_provided() {
}
#[tokio::test]
async fn default_argument_when_nullable_variable_set_to_null() {
async fn default_argument_when_nullable_variable_not_provided() {
run_query(
r#"query q($input: String) { nullableFieldWithDefaultArgumentValue(input: $input) }"#,
|result| {
assert_eq!(
result.get_field_value("nullableFieldWithDefaultArgumentValue"),
Some(&graphql_value!(r#"Some("Hello World")"#)),
);
},
)
.await;
}
#[tokio::test]
async fn null_when_nullable_variable_of_argument_with_default_value_set_to_null() {
run_variable_query(
r#"query q($input: String) { fieldWithDefaultArgumentValue(input: $input) }"#,
r#"query q($input: String) { nullableFieldWithDefaultArgumentValue(input: $input) }"#,
graphql_vars! {"input": null},
|result| {
assert_eq!(
result.get_field_value("fieldWithDefaultArgumentValue"),
Some(&graphql_value!(r#""Hello World""#)),
result.get_field_value("nullableFieldWithDefaultArgumentValue"),
Some(&graphql_value!(r#"None"#)),
);
},
)

View file

@ -60,11 +60,8 @@ where
self.variables
.as_ref()
.and_then(|iv| {
iv.to_object_value().map(|o| {
o.into_iter()
.map(|(k, v)| (k.to_owned(), v.clone()))
.collect()
})
iv.to_object_value()
.map(|o| o.into_iter().map(|(k, v)| (k.into(), v.clone())).collect())
})
.unwrap_or_default()
}
@ -86,11 +83,11 @@ where
///
/// This is a simple wrapper around the `execute_sync` function exposed at the
/// top level of this crate.
pub fn execute_sync<'a, QueryT, MutationT, SubscriptionT>(
&'a self,
root_node: &'a RootNode<QueryT, MutationT, SubscriptionT, S>,
pub fn execute_sync<QueryT, MutationT, SubscriptionT>(
&self,
root_node: &RootNode<QueryT, MutationT, SubscriptionT, S>,
context: &QueryT::Context,
) -> GraphQLResponse<'a, S>
) -> GraphQLResponse<S>
where
S: ScalarValue,
QueryT: GraphQLType<S>,
@ -114,7 +111,7 @@ where
&'a self,
root_node: &'a RootNode<'a, QueryT, MutationT, SubscriptionT, S>,
context: &'a QueryT::Context,
) -> GraphQLResponse<'a, S>
) -> GraphQLResponse<S>
where
QueryT: GraphQLTypeAsync<S>,
QueryT::TypeInfo: Sync,
@ -140,7 +137,7 @@ pub async fn resolve_into_stream<'req, 'rn, 'ctx, 'a, QueryT, MutationT, Subscri
req: &'req GraphQLRequest<S>,
root_node: &'rn RootNode<'a, QueryT, MutationT, SubscriptionT, S>,
context: &'ctx QueryT::Context,
) -> Result<(Value<ValuesStream<'a, S>>, Vec<ExecutionError<S>>), GraphQLError<'a>>
) -> Result<(Value<ValuesStream<'a, S>>, Vec<ExecutionError<S>>), GraphQLError>
where
'req: 'a,
'rn: 'a,
@ -166,16 +163,16 @@ where
/// to JSON and send it over the wire. Use the `is_ok` method to determine
/// whether to send a 200 or 400 HTTP status code.
#[derive(Debug)]
pub struct GraphQLResponse<'a, S = DefaultScalarValue>(
Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError<'a>>,
pub struct GraphQLResponse<S = DefaultScalarValue>(
Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError>,
);
impl<'a, S> GraphQLResponse<'a, S>
impl<S> GraphQLResponse<S>
where
S: ScalarValue,
{
/// Constructs new `GraphQLResponse` using the given result
pub fn from_result(r: Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError<'a>>) -> Self {
pub fn from_result(r: Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError>) -> Self {
Self(r)
}
@ -193,12 +190,11 @@ where
}
}
impl<'a, T> Serialize for GraphQLResponse<'a, T>
impl<T> Serialize for GraphQLResponse<T>
where
T: Serialize + ScalarValue,
Value<T>: Serialize,
ExecutionError<T>: Serialize,
GraphQLError<'a>: Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
@ -272,7 +268,7 @@ where
&'a self,
root_node: &'a RootNode<QueryT, MutationT, SubscriptionT, S>,
context: &QueryT::Context,
) -> GraphQLBatchResponse<'a, S>
) -> GraphQLBatchResponse<S>
where
QueryT: GraphQLType<S>,
MutationT: GraphQLType<S, Context = QueryT::Context>,
@ -298,7 +294,7 @@ where
&'a self,
root_node: &'a RootNode<'a, QueryT, MutationT, SubscriptionT, S>,
context: &'a QueryT::Context,
) -> GraphQLBatchResponse<'a, S>
) -> GraphQLBatchResponse<S>
where
QueryT: GraphQLTypeAsync<S>,
QueryT::TypeInfo: Sync,
@ -340,20 +336,17 @@ where
/// whether to send a 200 or 400 HTTP status code.
#[derive(Serialize)]
#[serde(untagged)]
pub enum GraphQLBatchResponse<'a, S = DefaultScalarValue>
pub enum GraphQLBatchResponse<S = DefaultScalarValue>
where
S: ScalarValue,
{
/// Result of a single operation in a GraphQL request.
Single(GraphQLResponse<'a, S>),
Single(GraphQLResponse<S>),
/// Result of a batch operation in a GraphQL request.
Batch(Vec<GraphQLResponse<'a, S>>),
Batch(Vec<GraphQLResponse<S>>),
}
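// Hedged, framework-agnostic sketch (not part of this diff) of the pattern the
// doc comment above describes: pick the HTTP status from `is_ok()` below.
//
//     let status: u16 = if batch_response.is_ok() { 200 } else { 400 };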
impl<'a, S> GraphQLBatchResponse<'a, S>
where
S: ScalarValue,
{
impl<S: ScalarValue> GraphQLBatchResponse<S> {
/// Returns if all the GraphQLResponse in this operation are ok,
/// you can use it to determine whether to send a 200 or 400 HTTP status code.
pub fn is_ok(&self) -> bool {
@ -639,20 +632,20 @@ pub mod tests {
"type":"connection_init",
"payload":{}
}"#
.to_owned(),
.into(),
),
WsIntegrationMessage::Expect(
r#"{
"type":"connection_ack"
}"#
.to_owned(),
.into(),
WS_INTEGRATION_EXPECT_DEFAULT_TIMEOUT,
),
WsIntegrationMessage::Expect(
r#"{
"type":"ka"
}"#
.to_owned(),
.into(),
WS_INTEGRATION_EXPECT_DEFAULT_TIMEOUT,
),
WsIntegrationMessage::Send(
@ -666,7 +659,7 @@ pub mod tests {
"query":"subscription { asyncHuman { id, name, homePlanet } }"
}
}"#
.to_owned(),
.into(),
),
WsIntegrationMessage::Expect(
r#"{
@ -682,7 +675,7 @@ pub mod tests {
}
}
}"#
.to_owned(),
.into(),
WS_INTEGRATION_EXPECT_DEFAULT_TIMEOUT,
),
];
@ -692,7 +685,7 @@ pub mod tests {
async fn test_ws_invalid_json<T: WsIntegration>(integration: &T) {
let messages = vec![
WsIntegrationMessage::Send("invalid json".to_owned()),
WsIntegrationMessage::Send("invalid json".into()),
WsIntegrationMessage::Expect(
r#"{
"type":"connection_error",
@ -700,7 +693,7 @@ pub mod tests {
"message":"serde error: expected value at line 1 column 1"
}
}"#
.to_owned(),
.into(),
WS_INTEGRATION_EXPECT_DEFAULT_TIMEOUT,
),
];
@ -715,20 +708,20 @@ pub mod tests {
"type":"connection_init",
"payload":{}
}"#
.to_owned(),
.into(),
),
WsIntegrationMessage::Expect(
r#"{
"type":"connection_ack"
}"#
.to_owned(),
.into(),
WS_INTEGRATION_EXPECT_DEFAULT_TIMEOUT
),
WsIntegrationMessage::Expect(
r#"{
"type":"ka"
}"#
.to_owned(),
.into(),
WS_INTEGRATION_EXPECT_DEFAULT_TIMEOUT
),
WsIntegrationMessage::Send(
@ -742,7 +735,7 @@ pub mod tests {
"query":"subscription { asyncHuman }"
}
}"#
.to_owned(),
.into(),
),
WsIntegrationMessage::Expect(
r#"{
@ -756,7 +749,7 @@ pub mod tests {
}]
}]
}"#
.to_owned(),
.into(),
WS_INTEGRATION_EXPECT_DEFAULT_TIMEOUT
)
];

View file

@ -32,8 +32,6 @@ use crate::{graphql_scalar, InputValue, ScalarValue, Value};
type BigDecimal = bigdecimal::BigDecimal;
mod bigdecimal_scalar {
use std::convert::TryFrom as _;
use super::*;
pub(super) fn to_output<S: ScalarValue>(v: &BigDecimal) -> Value<S> {
@ -45,13 +43,13 @@ mod bigdecimal_scalar {
Ok(BigDecimal::from(i))
} else if let Some(f) = v.as_float_value() {
BigDecimal::try_from(f)
.map_err(|e| format!("Failed to parse `BigDecimal` from `Float`: {}", e))
.map_err(|e| format!("Failed to parse `BigDecimal` from `Float`: {e}"))
} else {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
BigDecimal::from_str(s)
.map_err(|e| format!("Failed to parse `BigDecimal` from `String`: {}", e))
.map_err(|e| format!("Failed to parse `BigDecimal` from `String`: {e}"))
})
}
}
@ -88,11 +86,10 @@ mod test {
assert!(
parsed.is_ok(),
"failed to parse `{:?}`: {:?}",
input,
"failed to parse `{input:?}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {:?}", input);
assert_eq!(parsed.unwrap(), expected, "input: {input:?}");
}
}
@ -110,7 +107,7 @@ mod test {
let input: InputValue = input;
let parsed = BigDecimal::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -126,7 +123,7 @@ mod test {
] {
let actual: InputValue = BigDecimal::from_str(raw).unwrap().to_input_value();
assert_eq!(actual, graphql_input_value!((raw)), "on value: {}", raw);
assert_eq!(actual, graphql_input_value!((raw)), "on value: {raw}");
}
}
}

View file

@ -14,9 +14,9 @@ mod object_id {
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<ObjectId, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
ObjectId::parse_str(s).map_err(|e| format!("Failed to parse `ObjectId`: {}", e))
ObjectId::parse_str(s).map_err(|e| format!("Failed to parse `ObjectId`: {e}"))
})
}
}
@ -28,15 +28,18 @@ mod utc_date_time {
use super::*;
pub(super) fn to_output<S: ScalarValue>(v: &UtcDateTime) -> Value<S> {
Value::scalar((*v).to_rfc3339_string())
Value::scalar(
(*v).try_to_rfc3339_string()
.unwrap_or_else(|e| panic!("failed to format `UtcDateTime` as RFC3339: {e}")),
)
}
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<UtcDateTime, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
UtcDateTime::parse_rfc3339_str(s)
.map_err(|e| format!("Failed to parse `UtcDateTime`: {}", e))
.map_err(|e| format!("Failed to parse `UtcDateTime`: {e}"))
})
}
}

View file

@ -62,9 +62,9 @@ mod date {
S: ScalarValue,
{
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
Date::parse_from_str(s, FORMAT).map_err(|e| format!("Invalid `Date`: {}", e))
Date::parse_from_str(s, FORMAT).map_err(|e| format!("Invalid `Date`: {e}"))
})
}
}
@ -127,7 +127,7 @@ mod local_time {
S: ScalarValue,
{
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
// First, try to parse the most used format.
// At the end, try to parse the full format for the parsing
@ -135,7 +135,7 @@ mod local_time {
LocalTime::parse_from_str(s, FORMAT_NO_MILLIS)
.or_else(|_| LocalTime::parse_from_str(s, FORMAT_NO_SECS))
.or_else(|_| LocalTime::parse_from_str(s, FORMAT))
.map_err(|e| format!("Invalid `LocalTime`: {}", e))
.map_err(|e| format!("Invalid `LocalTime`: {e}"))
})
}
}
@ -166,10 +166,10 @@ mod local_date_time {
S: ScalarValue,
{
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
LocalDateTime::parse_from_str(s, FORMAT)
.map_err(|e| format!("Invalid `LocalDateTime`: {}", e))
.map_err(|e| format!("Invalid `LocalDateTime`: {e}"))
})
}
}
@ -219,10 +219,10 @@ mod date_time {
Tz: TimeZone + FromFixedOffset,
{
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
DateTime::<FixedOffset>::parse_from_rfc3339(s)
.map_err(|e| format!("Invalid `DateTime`: {}", e))
.map_err(|e| format!("Invalid `DateTime`: {e}"))
.map(FromFixedOffset::from_fixed_offset)
})
}
@ -345,11 +345,10 @@ mod date_test {
assert!(
parsed.is_ok(),
"failed to parse `{}`: {:?}",
raw,
"failed to parse `{raw}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {}", raw);
assert_eq!(parsed.unwrap(), expected, "input: {raw}");
}
}
@ -372,7 +371,7 @@ mod date_test {
let input: InputValue = input;
let parsed = Date::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -394,7 +393,7 @@ mod date_test {
] {
let actual: InputValue = val.to_input_value();
assert_eq!(actual, expected, "on value: {}", val);
assert_eq!(actual, expected, "on value: {val}");
}
}
}
@ -420,11 +419,10 @@ mod local_time_test {
assert!(
parsed.is_ok(),
"failed to parse `{}`: {:?}",
raw,
"failed to parse `{raw}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {}", raw);
assert_eq!(parsed.unwrap(), expected, "input: {raw}");
}
}
@ -451,7 +449,7 @@ mod local_time_test {
let input: InputValue = input;
let parsed = LocalTime::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -477,7 +475,7 @@ mod local_time_test {
] {
let actual: InputValue = val.to_input_value();
assert_eq!(actual, expected, "on value: {}", val);
assert_eq!(actual, expected, "on value: {val}");
}
}
}
@ -513,11 +511,10 @@ mod local_date_time_test {
assert!(
parsed.is_ok(),
"failed to parse `{}`: {:?}",
raw,
"failed to parse `{raw}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {}", raw);
assert_eq!(parsed.unwrap(), expected, "input: {raw}");
}
}
@ -546,7 +543,7 @@ mod local_date_time_test {
let input: InputValue = input;
let parsed = LocalDateTime::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -570,7 +567,7 @@ mod local_date_time_test {
] {
let actual: InputValue = val.to_input_value();
assert_eq!(actual, expected, "on value: {}", val);
assert_eq!(actual, expected, "on value: {val}");
}
}
}
@ -635,11 +632,10 @@ mod date_time_test {
assert!(
parsed.is_ok(),
"failed to parse `{}`: {:?}",
raw,
"failed to parse `{raw}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {}", raw);
assert_eq!(parsed.unwrap(), expected, "input: {raw}");
}
}
@ -673,7 +669,7 @@ mod date_time_test {
let input: InputValue = input;
let parsed = DateTime::<FixedOffset>::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -703,7 +699,7 @@ mod date_time_test {
] {
let actual: InputValue = val.to_input_value();
assert_eq!(actual, expected, "on value: {}", val);
assert_eq!(actual, expected, "on value: {val}");
}
}
}

View file

@ -34,10 +34,10 @@ mod tz {
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<TimeZone, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
s.parse::<TimeZone>()
.map_err(|e| format!("Failed to parse `TimeZone`: {}", e))
.map_err(|e| format!("Failed to parse `TimeZone`: {e}"))
})
}
}

View file

@ -34,8 +34,6 @@ use crate::{graphql_scalar, InputValue, ScalarValue, Value};
type Decimal = rust_decimal::Decimal;
mod rust_decimal_scalar {
use std::convert::TryFrom as _;
use super::*;
pub(super) fn to_output<S: ScalarValue>(v: &Decimal) -> Value<S> {
@ -46,14 +44,13 @@ mod rust_decimal_scalar {
if let Some(i) = v.as_int_value() {
Ok(Decimal::from(i))
} else if let Some(f) = v.as_float_value() {
Decimal::try_from(f)
.map_err(|e| format!("Failed to parse `Decimal` from `Float`: {}", e))
Decimal::try_from(f).map_err(|e| format!("Failed to parse `Decimal` from `Float`: {e}"))
} else {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
Decimal::from_str(s)
.map_err(|e| format!("Failed to parse `Decimal` from `String`: {}", e))
.map_err(|e| format!("Failed to parse `Decimal` from `String`: {e}"))
})
}
}
@ -84,11 +81,10 @@ mod test {
assert!(
parsed.is_ok(),
"failed to parse `{:?}`: {:?}",
input,
"failed to parse `{input:?}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {:?}", input);
assert_eq!(parsed.unwrap(), expected, "input: {input:?}");
}
}
@ -108,7 +104,7 @@ mod test {
let input: InputValue = input;
let parsed = Decimal::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -117,7 +113,7 @@ mod test {
for raw in ["4.20", "0", "999.999999999", "875533788", "123", "43.44"] {
let actual: InputValue = Decimal::from_str(raw).unwrap().to_input_value();
assert_eq!(actual, graphql_input_value!((raw)), "on value: {}", raw);
assert_eq!(actual, graphql_input_value!((raw)), "on value: {raw}");
}
}
}

View file

@ -1,8 +1,4 @@
use std::{
convert::{TryFrom as _, TryInto as _},
fmt,
marker::PhantomData,
};
use std::{fmt, marker::PhantomData};
use indexmap::IndexMap;
use serde::{
@ -42,7 +38,7 @@ impl<T: Serialize> Serialize for ExecutionError<T> {
}
}
impl<'a> Serialize for GraphQLError<'a> {
impl Serialize for GraphQLError {
fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
#[derive(Serialize)]
struct Helper {
@ -247,11 +243,11 @@ impl Serialize for SourcePosition {
}
}
impl<'a> Serialize for Spanning<ParseError<'a>> {
impl Serialize for Spanning<ParseError> {
fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
let mut map = ser.serialize_map(Some(2))?;
let msg = format!("{}", self.item);
let msg = self.item.to_string();
map.serialize_key("message")?;
map.serialize_value(&msg)?;
@ -396,7 +392,7 @@ mod tests {
#[test]
fn error_extensions() {
let mut obj: Object<DefaultScalarValue> = Object::with_capacity(1);
obj.add_field("foo".to_string(), Value::scalar("bar"));
obj.add_field("foo", Value::scalar("bar"));
assert_eq!(
to_string(&ExecutionError::at_origin(FieldError::new(
"foo error",

View file

@ -57,14 +57,14 @@ mod date {
pub(super) fn to_output<S: ScalarValue>(v: &Date) -> Value<S> {
Value::scalar(
v.format(FORMAT)
.unwrap_or_else(|e| panic!("Failed to format `Date`: {}", e)),
.unwrap_or_else(|e| panic!("Failed to format `Date`: {e}")),
)
}
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<Date, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.and_then(|s| Date::parse(s, FORMAT).map_err(|e| format!("Invalid `Date`: {}", e)))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| Date::parse(s, FORMAT).map_err(|e| format!("Invalid `Date`: {e}")))
}
}
@ -109,13 +109,13 @@ mod local_time {
} else {
v.format(FORMAT)
}
.unwrap_or_else(|e| panic!("Failed to format `LocalTime`: {}", e)),
.unwrap_or_else(|e| panic!("Failed to format `LocalTime`: {e}")),
)
}
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<LocalTime, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
// First, try to parse the most used format.
// At the end, try to parse the full format for the parsing
@ -123,7 +123,7 @@ mod local_time {
LocalTime::parse(s, FORMAT_NO_MILLIS)
.or_else(|_| LocalTime::parse(s, FORMAT_NO_SECS))
.or_else(|_| LocalTime::parse(s, FORMAT))
.map_err(|e| format!("Invalid `LocalTime`: {}", e))
.map_err(|e| format!("Invalid `LocalTime`: {e}"))
})
}
}
@ -146,16 +146,15 @@ mod local_date_time {
pub(super) fn to_output<S: ScalarValue>(v: &LocalDateTime) -> Value<S> {
Value::scalar(
v.format(FORMAT)
.unwrap_or_else(|e| panic!("Failed to format `LocalDateTime`: {}", e)),
.unwrap_or_else(|e| panic!("Failed to format `LocalDateTime`: {e}")),
)
}
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<LocalDateTime, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
LocalDateTime::parse(s, FORMAT)
.map_err(|e| format!("Invalid `LocalDateTime`: {}", e))
LocalDateTime::parse(s, FORMAT).map_err(|e| format!("Invalid `LocalDateTime`: {e}"))
})
}
}
@ -186,15 +185,15 @@ mod date_time {
Value::scalar(
v.to_offset(UtcOffset::UTC)
.format(&Rfc3339)
.unwrap_or_else(|e| panic!("Failed to format `DateTime`: {}", e)),
.unwrap_or_else(|e| panic!("Failed to format `DateTime`: {e}")),
)
}
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<DateTime, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
DateTime::parse(s, &Rfc3339).map_err(|e| format!("Invalid `DateTime`: {}", e))
DateTime::parse(s, &Rfc3339).map_err(|e| format!("Invalid `DateTime`: {e}"))
})
.map(|dt| dt.to_offset(UtcOffset::UTC))
}
@ -228,16 +227,16 @@ mod utc_offset {
pub(super) fn to_output<S: ScalarValue>(v: &UtcOffset) -> Value<S> {
Value::scalar(
v.format(UTC_OFFSET_FORMAT)
.unwrap_or_else(|e| panic!("Failed to format `UtcOffset`: {}", e)),
.unwrap_or_else(|e| panic!("Failed to format `UtcOffset`: {e}")),
)
}
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<UtcOffset, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| {
UtcOffset::parse(s, UTC_OFFSET_FORMAT)
.map_err(|e| format!("Invalid `UtcOffset`: {}", e))
.map_err(|e| format!("Invalid `UtcOffset`: {e}"))
})
}
}
@ -261,11 +260,10 @@ mod date_test {
assert!(
parsed.is_ok(),
"failed to parse `{}`: {:?}",
raw,
"failed to parse `{raw}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {}", raw);
assert_eq!(parsed.unwrap(), expected, "input: {raw}");
}
}
@ -288,7 +286,7 @@ mod date_test {
let input: InputValue = input;
let parsed = Date::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -302,7 +300,7 @@ mod date_test {
] {
let actual: InputValue = val.to_input_value();
assert_eq!(actual, expected, "on value: {}", val);
assert_eq!(actual, expected, "on value: {val}");
}
}
}
@ -330,11 +328,10 @@ mod local_time_test {
assert!(
parsed.is_ok(),
"failed to parse `{}`: {:?}",
raw,
"failed to parse `{raw}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {}", raw);
assert_eq!(parsed.unwrap(), expected, "input: {raw}");
}
}
@ -364,7 +361,7 @@ mod local_time_test {
let input: InputValue = input;
let parsed = LocalTime::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -378,7 +375,7 @@ mod local_time_test {
] {
let actual: InputValue = val.to_input_value();
assert_eq!(actual, expected, "on value: {}", val);
assert_eq!(actual, expected, "on value: {val}");
}
}
}
@ -402,11 +399,10 @@ mod local_date_time_test {
assert!(
parsed.is_ok(),
"failed to parse `{}`: {:?}",
raw,
"failed to parse `{raw}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {}", raw);
assert_eq!(parsed.unwrap(), expected, "input: {raw}");
}
}
@ -437,7 +433,7 @@ mod local_date_time_test {
let input: InputValue = input;
let parsed = LocalDateTime::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -455,7 +451,7 @@ mod local_date_time_test {
] {
let actual: InputValue = val.to_input_value();
assert_eq!(actual, expected, "on value: {}", val);
assert_eq!(actual, expected, "on value: {val}");
}
}
}
@ -490,11 +486,10 @@ mod date_time_test {
assert!(
parsed.is_ok(),
"failed to parse `{}`: {:?}",
raw,
"failed to parse `{raw}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {}", raw);
assert_eq!(parsed.unwrap(), expected, "input: {raw}");
}
}
@ -528,7 +523,7 @@ mod date_time_test {
let input: InputValue = input;
let parsed = DateTime::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -546,7 +541,7 @@ mod date_time_test {
] {
let actual: InputValue = val.to_input_value();
assert_eq!(actual, expected, "on value: {}", val);
assert_eq!(actual, expected, "on value: {val}");
}
}
}
@ -574,11 +569,10 @@ mod utc_offset_test {
assert!(
parsed.is_ok(),
"failed to parse `{}`: {:?}",
raw,
"failed to parse `{raw}`: {:?}",
parsed.unwrap_err(),
);
assert_eq!(parsed.unwrap(), expected, "input: {}", raw);
assert_eq!(parsed.unwrap(), expected, "input: {raw}");
}
}
@ -607,7 +601,7 @@ mod utc_offset_test {
let input: InputValue = input;
let parsed = UtcOffset::from_input_value(&input);
assert!(parsed.is_err(), "allows input: {:?}", input);
assert!(parsed.is_err(), "allows input: {input:?}");
}
}
@ -620,7 +614,7 @@ mod utc_offset_test {
] {
let actual: InputValue = val.to_input_value();
assert_eq!(actual, expected, "on value: {}", val);
assert_eq!(actual, expected, "on value: {val}");
}
}
}

View file

@ -14,8 +14,8 @@ mod url_scalar {
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<Url, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.and_then(|s| Url::parse(s).map_err(|e| format!("Failed to parse `Url`: {}", e)))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| Url::parse(s).map_err(|e| format!("Failed to parse `Url`: {e}")))
}
}

View file

@ -16,8 +16,8 @@ mod uuid_scalar {
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<Uuid, String> {
v.as_string_value()
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.and_then(|s| Uuid::parse_str(s).map_err(|e| format!("Failed to parse `Uuid`: {}", e)))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
.and_then(|s| Uuid::parse_str(s).map_err(|e| format!("Failed to parse `Uuid`: {e}")))
}
}

View file

@ -5,15 +5,12 @@ pub(crate) const INTROSPECTION_QUERY_WITHOUT_DESCRIPTIONS: &str =
/// The desired GraphQL introspection format for the canonical query
/// (<https://github.com/graphql/graphql-js/blob/90bd6ff72625173dd39a1f82cfad9336cfad8f65/src/utilities/getIntrospectionQuery.ts#L62>)
#[derive(Clone, Copy, Debug, Default)]
pub enum IntrospectionFormat {
/// The canonical GraphQL introspection query.
#[default]
All,
/// The canonical GraphQL introspection query without descriptions.
WithoutDescriptions,
}
impl Default for IntrospectionFormat {
fn default() -> Self {
IntrospectionFormat::All
}
}
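// Hedged, self-contained illustration (not from this diff; the `Format` enum is
// hypothetical) of the derived-default pattern adopted above: `#[derive(Default)]`
// on an enum with a `#[default]` variant (stable since Rust 1.62) replaces the
// hand-written `impl Default` removed here.
#[derive(Clone, Copy, Debug, Default)]
enum Format {
    #[default]
    All,
    WithoutDescriptions,
}

fn main() {
    assert!(matches!(Format::default(), Format::All));
}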

View file

@ -6,6 +6,7 @@
// Required for using `juniper_codegen` macros inside this crate to resolve
// absolute `::juniper` path correctly, without errors.
extern crate core;
extern crate self as juniper;
use std::fmt;
@ -93,10 +94,10 @@ pub use crate::{
};
/// An error that prevented query execution
#[derive(Debug, PartialEq)]
#[allow(missing_docs)]
pub enum GraphQLError<'a> {
ParseError(Spanning<ParseError<'a>>),
#[derive(Debug, Eq, PartialEq)]
pub enum GraphQLError {
ParseError(Spanning<ParseError>),
ValidationError(Vec<RuleError>),
NoOperationProvided,
MultipleOperationsProvided,
@ -105,26 +106,38 @@ pub enum GraphQLError<'a> {
NotSubscription,
}
impl<'a> fmt::Display for GraphQLError<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
impl fmt::Display for GraphQLError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
GraphQLError::ParseError(error) => write!(f, "{}", error),
GraphQLError::ValidationError(errors) => {
for error in errors {
writeln!(f, "{}", error)?;
Self::ParseError(e) => write!(f, "{e}"),
Self::ValidationError(errs) => {
for e in errs {
writeln!(f, "{e}")?;
}
Ok(())
}
GraphQLError::NoOperationProvided => write!(f, "No operation provided"),
GraphQLError::MultipleOperationsProvided => write!(f, "Multiple operations provided"),
GraphQLError::UnknownOperationName => write!(f, "Unknown operation name"),
GraphQLError::IsSubscription => write!(f, "Operation is a subscription"),
GraphQLError::NotSubscription => write!(f, "Operation is not a subscription"),
Self::NoOperationProvided => write!(f, "No operation provided"),
Self::MultipleOperationsProvided => write!(f, "Multiple operations provided"),
Self::UnknownOperationName => write!(f, "Unknown operation name"),
Self::IsSubscription => write!(f, "Operation is a subscription"),
Self::NotSubscription => write!(f, "Operation is not a subscription"),
}
}
}
impl<'a> std::error::Error for GraphQLError<'a> {}
impl std::error::Error for GraphQLError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
Self::ParseError(e) => Some(e),
Self::ValidationError(errs) => Some(errs.first()?),
Self::NoOperationProvided
| Self::MultipleOperationsProvided
| Self::UnknownOperationName
| Self::IsSubscription
| Self::NotSubscription => None,
}
}
}
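// Hedged, self-contained sketch (not part of this diff) of handling the now
// lifetime-free `GraphQLError` returned by the crate-level entry points; the
// `Query` root type below is hypothetical.
use juniper::{
    graphql_object, graphql_vars, EmptyMutation, EmptySubscription, GraphQLError, RootNode,
};

struct Query;

#[graphql_object]
impl Query {
    fn ping() -> &'static str {
        "pong"
    }
}

fn main() {
    let schema = RootNode::new(
        Query,
        EmptyMutation::<()>::new(),
        EmptySubscription::<()>::new(),
    );
    match juniper::execute_sync("{ ping }", None, &schema, &graphql_vars! {}, &()) {
        // Partial data plus any field-level errors.
        Ok((data, errs)) => println!("data: {data:?}, field errors: {errs:?}"),
        Err(GraphQLError::ParseError(e)) => eprintln!("parse error: {e}"),
        Err(GraphQLError::ValidationError(errs)) => {
            for e in errs {
                eprintln!("validation error: {e}");
            }
        }
        // `NoOperationProvided`, `UnknownOperationName`, etc.
        Err(other) => eprintln!("execution error: {other}"),
    }
}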
/// Execute a query synchronously in a provided schema
pub fn execute_sync<'a, S, QueryT, MutationT, SubscriptionT>(
@ -133,7 +146,7 @@ pub fn execute_sync<'a, S, QueryT, MutationT, SubscriptionT>(
root_node: &'a RootNode<QueryT, MutationT, SubscriptionT, S>,
variables: &Variables<S>,
context: &QueryT::Context,
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError<'a>>
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError>
where
S: ScalarValue,
QueryT: GraphQLType<S>,
@ -172,7 +185,7 @@ pub async fn execute<'a, S, QueryT, MutationT, SubscriptionT>(
root_node: &'a RootNode<'a, QueryT, MutationT, SubscriptionT, S>,
variables: &Variables<S>,
context: &QueryT::Context,
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError<'a>>
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError>
where
QueryT: GraphQLTypeAsync<S>,
QueryT::TypeInfo: Sync,
@ -216,7 +229,7 @@ pub async fn resolve_into_stream<'a, S, QueryT, MutationT, SubscriptionT>(
root_node: &'a RootNode<'a, QueryT, MutationT, SubscriptionT, S>,
variables: &Variables<S>,
context: &'a QueryT::Context,
) -> Result<(Value<ValuesStream<'a, S>>, Vec<ExecutionError<S>>), GraphQLError<'a>>
) -> Result<(Value<ValuesStream<'a, S>>, Vec<ExecutionError<S>>), GraphQLError>
where
QueryT: GraphQLTypeAsync<S>,
QueryT::TypeInfo: Sync,
@ -259,7 +272,7 @@ pub fn introspect<'a, S, QueryT, MutationT, SubscriptionT>(
root_node: &'a RootNode<QueryT, MutationT, SubscriptionT, S>,
context: &QueryT::Context,
format: IntrospectionFormat,
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError<'a>>
) -> Result<(Value<S>, Vec<ExecutionError<S>>), GraphQLError>
where
S: ScalarValue,
QueryT: GraphQLType<S>,
@ -278,8 +291,8 @@ where
)
}
impl<'a> From<Spanning<ParseError<'a>>> for GraphQLError<'a> {
fn from(f: Spanning<ParseError<'a>>) -> GraphQLError<'a> {
GraphQLError::ParseError(f)
impl From<Spanning<ParseError>> for GraphQLError {
fn from(err: Spanning<ParseError>) -> Self {
Self::ParseError(err)
}
}

View file

@ -217,37 +217,35 @@ mod tests {
assert_eq!(
graphql_vars! {"key": 123},
vec![("key".to_owned(), IV::scalar(123))]
vec![("key".into(), IV::scalar(123))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": "val"},
vec![("key".to_owned(), IV::scalar("val"))]
vec![("key".into(), IV::scalar("val"))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": 1.23},
vec![("key".to_owned(), IV::scalar(1.23))]
vec![("key".into(), IV::scalar(1.23))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": 1 + 2},
vec![("key".to_owned(), IV::scalar(3))]
.into_iter()
.collect(),
vec![("key".into(), IV::scalar(3))].into_iter().collect(),
);
assert_eq!(
graphql_vars! {"key": false},
vec![("key".to_owned(), IV::scalar(false))]
vec![("key".into(), IV::scalar(false))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": (val)},
vec![("key".to_owned(), IV::scalar(42))]
vec![("key".into(), IV::scalar(42))]
.into_iter()
.collect::<V>(),
);
@ -257,13 +255,13 @@ mod tests {
fn r#enum() {
assert_eq!(
graphql_vars! {"key": ENUM},
vec![("key".to_owned(), IV::enum_value("ENUM"))]
vec![("key".into(), IV::enum_value("ENUM"))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": lowercase},
vec![("key".to_owned(), IV::enum_value("lowercase"))]
vec![("key".into(), IV::enum_value("lowercase"))]
.into_iter()
.collect::<V>(),
);
@ -273,19 +271,19 @@ mod tests {
fn variable() {
assert_eq!(
graphql_vars! {"key": @var},
vec![("key".to_owned(), IV::variable("var"))]
vec![("key".into(), IV::variable("var"))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": @array},
vec![("key".to_owned(), IV::variable("array"))]
vec![("key".into(), IV::variable("array"))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": @object},
vec![("key".to_owned(), IV::variable("object"))]
vec![("key".into(), IV::variable("object"))]
.into_iter()
.collect::<V>(),
);
@ -297,68 +295,65 @@ mod tests {
assert_eq!(
graphql_vars! {"key": []},
vec![("key".to_owned(), IV::list(vec![]))]
vec![("key".into(), IV::list(vec![]))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": [null]},
vec![("key".to_owned(), IV::list(vec![IV::Null]))]
vec![("key".into(), IV::list(vec![IV::Null]))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": [1]},
vec![("key".to_owned(), IV::list(vec![IV::scalar(1)]))]
vec![("key".into(), IV::list(vec![IV::scalar(1)]))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": [1 + 2]},
vec![("key".to_owned(), IV::list(vec![IV::scalar(3)]))]
vec![("key".into(), IV::list(vec![IV::scalar(3)]))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": [(val)]},
vec![("key".to_owned(), IV::list(vec![IV::scalar(42)]))]
vec![("key".into(), IV::list(vec![IV::scalar(42)]))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": [ENUM]},
vec![("key".to_owned(), IV::list(vec![IV::enum_value("ENUM")]))]
vec![("key".into(), IV::list(vec![IV::enum_value("ENUM")]))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": [lowercase]},
vec![(
"key".to_owned(),
IV::list(vec![IV::enum_value("lowercase")])
)]
.into_iter()
.collect::<V>(),
vec![("key".into(), IV::list(vec![IV::enum_value("lowercase")]))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": [@var]},
vec![("key".to_owned(), IV::list(vec![IV::variable("var")]))]
vec![("key".into(), IV::list(vec![IV::variable("var")]))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": [@array]},
vec![("key".to_owned(), IV::list(vec![IV::variable("array")]))]
vec![("key".into(), IV::list(vec![IV::variable("array")]))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": [@object]},
vec![("key".to_owned(), IV::list(vec![IV::variable("object")]))]
vec![("key".into(), IV::list(vec![IV::variable("object")]))]
.into_iter()
.collect::<V>(),
);
@ -366,7 +361,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": [1, [2], 3]},
vec![(
"key".to_owned(),
"key".into(),
IV::list(vec![
IV::scalar(1),
IV::list(vec![IV::scalar(2)]),
@ -379,7 +374,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": [1, [2 + 3], 3]},
vec![(
"key".to_owned(),
"key".into(),
IV::list(vec![
IV::scalar(1),
IV::list(vec![IV::scalar(5)]),
@ -392,7 +387,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": [1, [ENUM], (val)]},
vec![(
"key".to_owned(),
"key".into(),
IV::list(vec![
IV::scalar(1),
IV::list(vec![IV::enum_value("ENUM")]),
@ -405,7 +400,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": [1 + 2, [(val)], @val]},
vec![(
"key".to_owned(),
"key".into(),
IV::list(vec![
IV::scalar(3),
IV::list(vec![IV::scalar(42)]),
@ -418,7 +413,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": [1, [@val], ENUM]},
vec![(
"key".to_owned(),
"key".into(),
IV::list(vec![
IV::scalar(1),
IV::list(vec![IV::variable("val")]),
@ -436,14 +431,14 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {}},
vec![("key".to_owned(), IV::object(IndexMap::<String, _>::new()))]
vec![("key".into(), IV::object(IndexMap::<String, _>::new()))]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": {"key": null}},
vec![("key".to_owned(), IV::object(indexmap! {"key" => IV::Null}))]
vec![("key".into(), IV::object(indexmap! {"key" => IV::Null}))]
.into_iter()
.collect::<V>(),
);
@ -451,7 +446,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": 123}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::scalar(123)}),
)]
.into_iter()
@ -459,17 +454,14 @@ mod tests {
);
assert_eq!(
graphql_vars! {"key": {"key": 1 + 2}},
vec![(
"key".to_owned(),
IV::object(indexmap! {"key" => IV::scalar(3)}),
)]
.into_iter()
.collect::<V>(),
vec![("key".into(), IV::object(indexmap! {"key" => IV::scalar(3)}),)]
.into_iter()
.collect::<V>(),
);
assert_eq!(
graphql_vars! {"key": {"key": (val)}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::scalar(42)}),
)]
.into_iter()
@ -479,7 +471,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": []}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::list(vec![])}),
)]
.into_iter()
@ -488,7 +480,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": [null]}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::list(vec![IV::Null])}),
)]
.into_iter()
@ -497,7 +489,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": [1]}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::list(vec![IV::scalar(1)])}),
)]
.into_iter()
@ -506,7 +498,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": [1 + 2]}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::list(vec![IV::scalar(3)])}),
)]
.into_iter()
@ -515,7 +507,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": [(val)]}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::list(vec![IV::scalar(42)])}),
)]
.into_iter()
@ -524,7 +516,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": ENUM}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::enum_value("ENUM")}),
)]
.into_iter()
@ -533,7 +525,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": lowercase}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::enum_value("lowercase")}),
)]
.into_iter()
@ -542,7 +534,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": @val}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::variable("val")}),
)]
.into_iter()
@ -551,7 +543,7 @@ mod tests {
assert_eq!(
graphql_vars! {"key": {"key": @array}},
vec![(
"key".to_owned(),
"key".into(),
IV::object(indexmap! {"key" => IV::variable("array")}),
)]
.into_iter()
@ -576,7 +568,7 @@ mod tests {
},
vec![
(
"inner".to_owned(),
"inner".into(),
IV::object(indexmap! {
"key1" => IV::scalar(42),
"key2" => IV::scalar("val"),
@ -602,7 +594,7 @@ mod tests {
]),
}),
),
("more".to_owned(), IV::variable("var")),
("more".into(), IV::variable("var")),
]
.into_iter()
.collect::<V>(),

View file

@ -41,8 +41,7 @@ where
/// [`GraphQLType::name`]: crate::GraphQLType::name
pub fn err_unnamed_type<S>(name: &str) -> FieldError<S> {
FieldError::from(format!(
"Expected `{}` type to implement `GraphQLType::name`",
name,
"Expected `{name}` type to implement `GraphQLType::name`",
))
}

View file

@ -563,6 +563,38 @@ macro_rules! assert_interfaces_impls {
};
}
/// Asserts that all [transitive interfaces][0] (the ones implemented by the
/// `$interface`) are also implemented by the `$implementor`.
///
/// [0]: https://spec.graphql.org/October2021#sel-FAHbhBHCAACGB35P
#[macro_export]
macro_rules! assert_transitive_impls {
($scalar: ty, $interface: ty, $implementor: ty $(, $transitive: ty)* $(,)?) => {
const _: () = {
$({
let is_present = $crate::macros::reflect::str_exists_in_arr(
<$implementor as ::juniper::macros::reflect::BaseType<$scalar>>::NAME,
<$transitive as ::juniper::macros::reflect::BaseSubTypes<$scalar>>::NAMES,
);
if !is_present {
const MSG: &str = $crate::const_concat!(
"Failed to implement interface `",
<$interface as $crate::macros::reflect::BaseType<$scalar>>::NAME,
"` on `",
<$implementor as $crate::macros::reflect::BaseType<$scalar>>::NAME,
"`: missing `impl = ` for transitive interface `",
<$transitive as $crate::macros::reflect::BaseType<$scalar>>::NAME,
"` on `",
<$implementor as $crate::macros::reflect::BaseType<$scalar>>::NAME,
"`."
);
::std::panic!("{}", MSG);
}
})*
};
};
}
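// Hedged usage sketch (not emitted by this diff); `Node`, `Resource` and
// `Endpoint` are hypothetical interface/implementor types. If `Endpoint`
// implements `Resource`, and `Resource` itself implements `Node`, the codegen
// can assert that `Endpoint` also lists `Node` among its implemented interfaces:
//
//     assert_transitive_impls!(DefaultScalarValue, Resource, Endpoint, Node);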
/// Asserts validness of [`Field`] [`Arguments`] and returned [`Type`].
///
/// This assertion is a combination of [`assert_subtype`] and
@ -863,11 +895,11 @@ macro_rules! const_concat {
}
const CON: [u8; LEN] = concat([$($s),*]);
// TODO: Use `str::from_utf8()` once it becomes `const`.
// SAFETY: This is safe, as we concatenate multiple UTF-8 strings one
// after another byte-by-byte.
#[allow(unsafe_code)]
unsafe { ::std::str::from_utf8_unchecked(&CON) }
// TODO: Use `.unwrap()` once it becomes `const`.
match ::std::str::from_utf8(&CON) {
::std::result::Result::Ok(s) => s,
_ => unreachable!(),
}
}};
}
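// Hedged, self-contained sketch (not from this diff) of the const-friendly
// pattern adopted above: `str::from_utf8()` is already usable in `const`
// context, while `Result::unwrap()` is not, hence the explicit `match`.
const BYTES: [u8; 5] = *b"const";
const AS_STR: &str = match std::str::from_utf8(&BYTES) {
    Ok(s) => s,
    _ => panic!("invalid UTF-8"),
};

fn main() {
    assert_eq!(AS_STR, "const");
}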
@ -985,12 +1017,11 @@ macro_rules! format_type {
const TYPE_ARR: [u8; RES_LEN] = format_type_arr();
// TODO: Use `str::from_utf8()` once it becomes `const`.
// SAFETY: This is safe, as we concatenate multiple UTF-8 strings one
// after another byte-by-byte.
#[allow(unsafe_code)]
const TYPE_FORMATTED: &str =
unsafe { ::std::str::from_utf8_unchecked(TYPE_ARR.as_slice()) };
// TODO: Use `.unwrap()` once it becomes `const`.
const TYPE_FORMATTED: &str = match ::std::str::from_utf8(TYPE_ARR.as_slice()) {
::std::result::Result::Ok(s) => s,
_ => unreachable!(),
};
TYPE_FORMATTED
}};

View file

@ -22,7 +22,7 @@ use crate::{
pub fn parse_document_source<'a, 'b, S>(
s: &'a str,
schema: &'b SchemaType<'b, S>,
) -> UnlocatedParseResult<'a, OwnedDocument<'a, S>>
) -> UnlocatedParseResult<OwnedDocument<'a, S>>
where
S: ScalarValue,
{
@ -34,7 +34,7 @@ where
fn parse_document<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
) -> UnlocatedParseResult<'a, OwnedDocument<'a, S>>
) -> UnlocatedParseResult<OwnedDocument<'a, S>>
where
S: ScalarValue,
{
@ -52,7 +52,7 @@ where
fn parse_definition<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
) -> UnlocatedParseResult<'a, Definition<'a, S>>
) -> UnlocatedParseResult<Definition<'a, S>>
where
S: ScalarValue,
{
@ -66,14 +66,14 @@ where
Token::Name("fragment") => Ok(Definition::Fragment(parse_fragment_definition(
parser, schema,
)?)),
_ => Err(parser.next_token()?.map(ParseError::UnexpectedToken)),
_ => Err(parser.next_token()?.map(ParseError::unexpected_token)),
}
}
fn parse_operation_definition<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
) -> ParseResult<'a, Operation<'a, S>>
) -> ParseResult<Operation<'a, S>>
where
S: ScalarValue,
{
@ -129,7 +129,7 @@ where
fn parse_fragment_definition<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
) -> ParseResult<'a, Fragment<'a, S>>
) -> ParseResult<Fragment<'a, S>>
where
S: ScalarValue,
{
@ -139,7 +139,7 @@ where
let name = match parser.expect_name() {
Ok(n) => {
if n.item == "on" {
return Err(n.map(|_| ParseError::UnexpectedToken(Token::Name("on"))));
return Err(n.map(|_| ParseError::UnexpectedToken("on".into())));
} else {
n
}
@ -174,7 +174,7 @@ fn parse_optional_selection_set<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
fields: Option<&[&MetaField<'b, S>]>,
) -> OptionParseResult<'a, Vec<Selection<'a, S>>>
) -> OptionParseResult<Vec<Selection<'a, S>>>
where
S: ScalarValue,
{
@ -189,7 +189,7 @@ fn parse_selection_set<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
fields: Option<&[&MetaField<'b, S>]>,
) -> ParseResult<'a, Vec<Selection<'a, S>>>
) -> ParseResult<Vec<Selection<'a, S>>>
where
S: ScalarValue,
{
@ -204,7 +204,7 @@ fn parse_selection<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
fields: Option<&[&MetaField<'b, S>]>,
) -> UnlocatedParseResult<'a, Selection<'a, S>>
) -> UnlocatedParseResult<Selection<'a, S>>
where
S: ScalarValue,
{
@ -218,7 +218,7 @@ fn parse_fragment<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
fields: Option<&[&MetaField<'b, S>]>,
) -> UnlocatedParseResult<'a, Selection<'a, S>>
) -> UnlocatedParseResult<Selection<'a, S>>
where
S: ScalarValue,
{
@ -292,7 +292,7 @@ where
},
)))
}
_ => Err(parser.next_token()?.map(ParseError::UnexpectedToken)),
_ => Err(parser.next_token()?.map(ParseError::unexpected_token)),
}
}
@ -300,7 +300,7 @@ fn parse_field<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
fields: Option<&[&MetaField<'b, S>]>,
) -> ParseResult<'a, Field<'a, S>>
) -> ParseResult<Field<'a, S>>
where
S: ScalarValue,
{
@ -351,7 +351,7 @@ fn parse_arguments<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
arguments: Option<&[Argument<'b, S>]>,
) -> OptionParseResult<'a, Arguments<'a, S>>
) -> OptionParseResult<Arguments<'a, S>>
where
S: ScalarValue,
{
@ -376,7 +376,7 @@ fn parse_argument<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
arguments: Option<&[Argument<'b, S>]>,
) -> ParseResult<'a, (Spanning<&'a str>, Spanning<InputValue<S>>)>
) -> ParseResult<(Spanning<&'a str>, Spanning<InputValue<S>>)>
where
S: ScalarValue,
{
@ -395,21 +395,21 @@ where
))
}
fn parse_operation_type<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, OperationType> {
fn parse_operation_type(parser: &mut Parser<'_>) -> ParseResult<OperationType> {
match parser.peek().item {
Token::Name("query") => Ok(parser.next_token()?.map(|_| OperationType::Query)),
Token::Name("mutation") => Ok(parser.next_token()?.map(|_| OperationType::Mutation)),
Token::Name("subscription") => {
Ok(parser.next_token()?.map(|_| OperationType::Subscription))
}
_ => Err(parser.next_token()?.map(ParseError::UnexpectedToken)),
_ => Err(parser.next_token()?.map(ParseError::unexpected_token)),
}
}
fn parse_variable_definitions<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
) -> OptionParseResult<'a, VariableDefinitions<'a, S>>
) -> OptionParseResult<VariableDefinitions<'a, S>>
where
S: ScalarValue,
{
@ -433,7 +433,7 @@ where
fn parse_variable_definition<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
) -> ParseResult<'a, (Spanning<&'a str>, VariableDefinition<'a, S>)>
) -> ParseResult<(Spanning<&'a str>, VariableDefinition<'a, S>)>
where
S: ScalarValue,
{
@ -473,7 +473,7 @@ where
fn parse_directives<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
) -> OptionParseResult<'a, Vec<Spanning<Directive<'a, S>>>>
) -> OptionParseResult<Vec<Spanning<Directive<'a, S>>>>
where
S: ScalarValue,
{
@ -492,7 +492,7 @@ where
fn parse_directive<'a, 'b, S>(
parser: &mut Parser<'a>,
schema: &'b SchemaType<'b, S>,
) -> ParseResult<'a, Directive<'a, S>>
) -> ParseResult<Directive<'a, S>>
where
S: ScalarValue,
{
@ -516,7 +516,7 @@ where
))
}
pub fn parse_type<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, Type<'a>> {
pub fn parse_type<'a>(parser: &mut Parser<'a>) -> ParseResult<Type<'a>> {
let parsed_type = if let Some(Spanning {
start: start_pos, ..
}) = parser.skip(&Token::BracketOpen)?
@ -541,10 +541,7 @@ pub fn parse_type<'a>(parser: &mut Parser<'a>) -> ParseResult<'a, Type<'a>> {
})
}
fn wrap_non_null<'a>(
parser: &mut Parser<'a>,
inner: Spanning<Type<'a>>,
) -> ParseResult<'a, Type<'a>> {
fn wrap_non_null<'a>(parser: &mut Parser<'a>, inner: Spanning<Type<'a>>) -> ParseResult<Type<'a>> {
let Spanning { end: end_pos, .. } = parser.expect(&Token::ExclamationMark)?;
let wrapped = match inner.item {

View file

@ -20,8 +20,8 @@ pub struct Lexer<'a> {
/// A single scalar value literal
///
/// This is only used for tagging how the lexer has interpreted a value literal
#[derive(Debug, PartialEq, Clone, Copy)]
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ScalarToken<'a> {
String(&'a str),
Float(&'a str),
@ -29,8 +29,8 @@ pub enum ScalarToken<'a> {
}
/// A single token in the input source
#[derive(Debug, PartialEq, Clone, Copy)]
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Token<'a> {
Name(&'a str),
Scalar(ScalarToken<'a>),
@ -239,7 +239,7 @@ impl<'a> Lexer<'a> {
c if escaped => {
return Err(Spanning::zero_width(
&old_pos,
LexerError::UnknownEscapeSequence(format!("\\{}", c)),
LexerError::UnknownEscapeSequence(format!("\\{c}")),
));
}
'\\' => escaped = true,
@ -305,14 +305,14 @@ impl<'a> Lexer<'a> {
if len != 4 {
return Err(Spanning::zero_width(
start_pos,
LexerError::UnknownEscapeSequence("\\u".to_owned() + escape),
LexerError::UnknownEscapeSequence(format!("\\u{escape}")),
));
}
let code_point = u32::from_str_radix(escape, 16).map_err(|_| {
Spanning::zero_width(
start_pos,
LexerError::UnknownEscapeSequence("\\u".to_owned() + escape),
LexerError::UnknownEscapeSequence(format!("\\u{escape}")),
)
})?;
@ -338,7 +338,7 @@ impl<'a> Lexer<'a> {
let mut end_idx = loop {
if let Some((idx, ch)) = self.peek_char() {
if ch.is_digit(10) || (ch == '-' && last_idx == start_idx) {
if ch.is_ascii_digit() || (ch == '-' && last_idx == start_idx) {
if ch == '0' && last_char == '0' && last_idx == start_idx {
return Err(Spanning::zero_width(
&self.position,
@ -367,7 +367,7 @@ impl<'a> Lexer<'a> {
self.next_char();
end_idx = loop {
if let Some((idx, ch)) = self.peek_char() {
if ch.is_digit(10) {
if ch.is_ascii_digit() {
self.next_char();
} else if last_idx == start_idx {
return Err(Spanning::zero_width(
@ -396,7 +396,9 @@ impl<'a> Lexer<'a> {
end_idx = loop {
if let Some((idx, ch)) = self.peek_char() {
if ch.is_digit(10) || (last_idx == start_idx && (ch == '-' || ch == '+')) {
if ch.is_ascii_digit()
|| (last_idx == start_idx && (ch == '-' || ch == '+'))
{
self.next_char();
} else if last_idx == start_idx {
// 1e is not a valid floating point number
@ -483,9 +485,9 @@ impl<'a> Iterator for Lexer<'a> {
impl<'a> fmt::Display for Token<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Token::Name(name) => write!(f, "{}", name),
Token::Name(name) => write!(f, "{name}"),
Token::Scalar(ScalarToken::Int(s)) | Token::Scalar(ScalarToken::Float(s)) => {
write!(f, "{}", s)
write!(f, "{s}")
}
Token::Scalar(ScalarToken::String(s)) => {
write!(f, "\"{}\"", s.replace('\\', "\\\\").replace('"', "\\\""))
@ -527,15 +529,15 @@ fn is_number_start(c: char) -> bool {
impl fmt::Display for LexerError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
LexerError::UnknownCharacter(c) => write!(f, "Unknown character \"{}\"", c),
LexerError::UnknownCharacter(c) => write!(f, "Unknown character \"{c}\""),
LexerError::UnterminatedString => write!(f, "Unterminated string literal"),
LexerError::UnknownCharacterInString(c) => {
write!(f, "Unknown character \"{}\" in string literal", c)
write!(f, "Unknown character \"{c}\" in string literal")
}
LexerError::UnknownEscapeSequence(ref s) => {
write!(f, "Unknown escape sequence \"{}\" in string", s)
write!(f, "Unknown escape sequence \"{s}\" in string")
}
LexerError::UnexpectedCharacter(c) => write!(f, "Unexpected character \"{}\"", c),
LexerError::UnexpectedCharacter(c) => write!(f, "Unexpected character \"{c}\""),
LexerError::UnexpectedEndOfFile => write!(f, "Unexpected end of input"),
LexerError::InvalidNumber => write!(f, "Invalid number literal"),
}
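
A minimal sketch (not part of the diff) of the inline format arguments the rewrites above rely on; captured identifiers in format strings are stable since Rust 1.58 and render identically to the positional form:

fn demo() {
    let c = 'x';
    // Both render the same string; the captured form is just shorter.
    assert_eq!(
        format!("Unknown character \"{}\"", c),
        format!("Unknown character \"{c}\""),
    );
}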

View file

@ -1,12 +1,16 @@
use std::{fmt, result::Result};
use std::{error::Error, fmt, result::Result};
use smartstring::alias::String;
use crate::parser::{Lexer, LexerError, Spanning, Token};
/// Error while parsing a GraphQL query
#[derive(Debug, PartialEq)]
pub enum ParseError<'a> {
#[derive(Debug, Eq, PartialEq)]
pub enum ParseError {
/// An unexpected token occurred in the source
UnexpectedToken(Token<'a>),
// TODO: Previously was `Token<'a>`.
// Revisit on `graphql-parser` integration.
UnexpectedToken(String),
/// The input source abruptly ended
UnexpectedEndOfFile,
@ -18,14 +22,51 @@ pub enum ParseError<'a> {
ExpectedScalarError(&'static str),
}
#[doc(hidden)]
pub type ParseResult<'a, T> = Result<Spanning<T>, Spanning<ParseError<'a>>>;
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::UnexpectedToken(token) => write!(f, "Unexpected \"{token}\""),
Self::UnexpectedEndOfFile => write!(f, "Unexpected end of input"),
Self::LexerError(e) => e.fmt(f),
Self::ExpectedScalarError(e) => e.fmt(f),
}
}
}
impl Error for ParseError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
Self::LexerError(e) => Some(e),
Self::ExpectedScalarError(_) | Self::UnexpectedToken(_) | Self::UnexpectedEndOfFile => {
None
}
}
}
}
impl ParseError {
/// Creates a [`ParseError::UnexpectedToken`] out of the provided [`Token`].
#[must_use]
pub fn unexpected_token(token: Token<'_>) -> Self {
use std::fmt::Write as _;
let mut s = String::new();
// PANIC: Unwrapping is OK here, as it may panic only on allocation
// error.
write!(s, "{token}").unwrap();
Self::UnexpectedToken(s)
}
}
#[doc(hidden)]
pub type UnlocatedParseResult<'a, T> = Result<T, Spanning<ParseError<'a>>>;
pub type ParseResult<T> = Result<Spanning<T>, Spanning<ParseError>>;
#[doc(hidden)]
pub type OptionParseResult<'a, T> = Result<Option<Spanning<T>>, Spanning<ParseError<'a>>>;
pub type UnlocatedParseResult<T> = Result<T, Spanning<ParseError>>;
#[doc(hidden)]
pub type OptionParseResult<T> = Result<Option<Spanning<T>>, Spanning<ParseError>>;
#[doc(hidden)]
#[derive(Debug)]
@ -54,7 +95,7 @@ impl<'a> Parser<'a> {
}
#[doc(hidden)]
pub fn next_token(&mut self) -> ParseResult<'a, Token<'a>> {
pub fn next_token(&mut self) -> ParseResult<Token<'a>> {
if self.tokens.len() == 1 {
Err(Spanning::start_end(
&self.peek().start,
@ -67,9 +108,9 @@ impl<'a> Parser<'a> {
}
#[doc(hidden)]
pub fn expect(&mut self, expected: &Token) -> ParseResult<'a, Token<'a>> {
pub fn expect(&mut self, expected: &Token) -> ParseResult<Token<'a>> {
if &self.peek().item != expected {
Err(self.next_token()?.map(ParseError::UnexpectedToken))
Err(self.next_token()?.map(ParseError::unexpected_token))
} else {
self.next_token()
}
@ -79,7 +120,7 @@ impl<'a> Parser<'a> {
pub fn skip(
&mut self,
expected: &Token,
) -> Result<Option<Spanning<Token<'a>>>, Spanning<ParseError<'a>>> {
) -> Result<Option<Spanning<Token<'a>>>, Spanning<ParseError>> {
if &self.peek().item == expected {
Ok(Some(self.next_token()?))
} else if self.peek().item == Token::EndOfFile {
@ -98,10 +139,10 @@ impl<'a> Parser<'a> {
opening: &Token,
parser: F,
closing: &Token,
) -> ParseResult<'a, Vec<Spanning<T>>>
) -> ParseResult<Vec<Spanning<T>>>
where
T: fmt::Debug,
F: Fn(&mut Parser<'a>) -> ParseResult<'a, T>,
F: Fn(&mut Parser<'a>) -> ParseResult<T>,
{
let Spanning {
start: start_pos, ..
@ -123,10 +164,10 @@ impl<'a> Parser<'a> {
opening: &Token,
parser: F,
closing: &Token,
) -> ParseResult<'a, Vec<Spanning<T>>>
) -> ParseResult<Vec<Spanning<T>>>
where
T: fmt::Debug,
F: Fn(&mut Parser<'a>) -> ParseResult<'a, T>,
F: Fn(&mut Parser<'a>) -> ParseResult<T>,
{
let Spanning {
start: start_pos, ..
@ -148,10 +189,10 @@ impl<'a> Parser<'a> {
opening: &Token,
parser: F,
closing: &Token,
) -> ParseResult<'a, Vec<T>>
) -> ParseResult<Vec<T>>
where
T: fmt::Debug,
F: Fn(&mut Parser<'a>) -> UnlocatedParseResult<'a, T>,
F: Fn(&mut Parser<'a>) -> UnlocatedParseResult<T>,
{
let Spanning {
start: start_pos, ..
@ -168,7 +209,7 @@ impl<'a> Parser<'a> {
}
#[doc(hidden)]
pub fn expect_name(&mut self) -> ParseResult<'a, &'a str> {
pub fn expect_name(&mut self) -> ParseResult<&'a str> {
match *self.peek() {
Spanning {
item: Token::Name(_),
@ -188,20 +229,7 @@ impl<'a> Parser<'a> {
&self.peek().end,
ParseError::UnexpectedEndOfFile,
)),
_ => Err(self.next_token()?.map(ParseError::UnexpectedToken)),
_ => Err(self.next_token()?.map(ParseError::unexpected_token)),
}
}
}
impl<'a> fmt::Display for ParseError<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ParseError::UnexpectedToken(ref token) => write!(f, "Unexpected \"{}\"", token),
ParseError::UnexpectedEndOfFile => write!(f, "Unexpected end of input"),
ParseError::LexerError(ref err) => err.fmt(f),
ParseError::ExpectedScalarError(err) => err.fmt(f),
}
}
}
impl<'a> std::error::Error for ParseError<'a> {}
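
A minimal sketch (not part of the diff) of how the reworked error type is meant to be used, assuming `ParseError` and `Token` stay reachable through `juniper::parser` as in this tree:

use juniper::parser::{ParseError, Token};

fn demo() {
    // The token is rendered into an owned string, so the error no longer
    // borrows from the source text being parsed.
    let err = ParseError::unexpected_token(Token::Name("on"));
    assert_eq!(err, ParseError::UnexpectedToken("on".into()));
    assert_eq!(err.to_string(), "Unexpected \"on\"");
}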

View file

@ -16,18 +16,15 @@ where
s,
&SchemaType::new::<QueryRoot, MutationRoot, SubscriptionRoot>(&(), &(), &()),
)
.expect(&format!("Parse error on input {:#?}", s))
.expect(&format!("Parse error on input {s:#?}"))
}
fn parse_document_error<'a, S>(s: &'a str) -> Spanning<ParseError<'a>>
where
S: ScalarValue,
{
fn parse_document_error<S: ScalarValue>(s: &str) -> Spanning<ParseError> {
match parse_document_source::<S>(
s,
&SchemaType::new::<QueryRoot, MutationRoot, SubscriptionRoot>(&(), &(), &()),
) {
Ok(doc) => panic!("*No* parse error on input {:#?} =>\n{:#?}", s, doc),
Ok(doc) => panic!("*No* parse error on input {s:#?} =>\n{doc:#?}"),
Err(err) => err,
}
}
@ -136,7 +133,7 @@ fn errors() {
Spanning::start_end(
&SourcePosition::new(36, 1, 19),
&SourcePosition::new(40, 1, 23),
ParseError::UnexpectedToken(Token::Name("Type"))
ParseError::UnexpectedToken("Type".into())
)
);
@ -145,7 +142,7 @@ fn errors() {
Spanning::start_end(
&SourcePosition::new(8, 0, 8),
&SourcePosition::new(9, 0, 9),
ParseError::UnexpectedToken(Token::CurlyClose)
ParseError::unexpected_token(Token::CurlyClose)
)
);
}

View file

@ -13,8 +13,8 @@ fn tokenize_to_vec<'a>(s: &'a str) -> Vec<Spanning<Token<'a>>> {
break;
}
}
Some(Err(e)) => panic!("Error in input stream: {:#?} for {:#?}", e, s),
None => panic!("EOF before EndOfFile token in {:#?}", s),
Some(Err(e)) => panic!("Error in input stream: {e:#?} for {s:#?}"),
None => panic!("EOF before EndOfFile token in {s:#?}"),
}
}
@ -37,13 +37,13 @@ fn tokenize_error(s: &str) -> Spanning<LexerError> {
match lexer.next() {
Some(Ok(t)) => {
if t.item == Token::EndOfFile {
panic!("Tokenizer did not return error for {:#?}", s);
panic!("Tokenizer did not return error for {s:#?}");
}
}
Some(Err(e)) => {
return e;
}
None => panic!("Tokenizer did not return error for {:#?}", s),
None => panic!("Tokenizer did not return error for {s:#?}"),
}
}
}
@ -196,7 +196,7 @@ fn strings() {
Spanning::start_end(
&SourcePosition::new(0, 0, 0),
&SourcePosition::new(34, 0, 34),
Token::Scalar(ScalarToken::String(r#"unicode \u1234\u5678\u90AB\uCDEF"#))
Token::Scalar(ScalarToken::String(r#"unicode \u1234\u5678\u90AB\uCDEF"#)),
)
);
}
@ -207,7 +207,7 @@ fn string_errors() {
tokenize_error("\""),
Spanning::zero_width(
&SourcePosition::new(1, 0, 1),
LexerError::UnterminatedString
LexerError::UnterminatedString,
)
);
@ -215,7 +215,7 @@ fn string_errors() {
tokenize_error("\"no end quote"),
Spanning::zero_width(
&SourcePosition::new(13, 0, 13),
LexerError::UnterminatedString
LexerError::UnterminatedString,
)
);
@ -223,7 +223,7 @@ fn string_errors() {
tokenize_error("\"contains unescaped \u{0007} control char\""),
Spanning::zero_width(
&SourcePosition::new(20, 0, 20),
LexerError::UnknownCharacterInString('\u{0007}')
LexerError::UnknownCharacterInString('\u{0007}'),
)
);
@ -231,7 +231,7 @@ fn string_errors() {
tokenize_error("\"null-byte is not \u{0000} end of file\""),
Spanning::zero_width(
&SourcePosition::new(18, 0, 18),
LexerError::UnknownCharacterInString('\u{0000}')
LexerError::UnknownCharacterInString('\u{0000}'),
)
);
@ -239,7 +239,7 @@ fn string_errors() {
tokenize_error("\"multi\nline\""),
Spanning::zero_width(
&SourcePosition::new(6, 0, 6),
LexerError::UnterminatedString
LexerError::UnterminatedString,
)
);
@ -247,7 +247,7 @@ fn string_errors() {
tokenize_error("\"multi\rline\""),
Spanning::zero_width(
&SourcePosition::new(6, 0, 6),
LexerError::UnterminatedString
LexerError::UnterminatedString,
)
);
@ -255,7 +255,7 @@ fn string_errors() {
tokenize_error(r#""bad \z esc""#),
Spanning::zero_width(
&SourcePosition::new(6, 0, 6),
LexerError::UnknownEscapeSequence("\\z".to_owned())
LexerError::UnknownEscapeSequence("\\z".into()),
)
);
@ -263,7 +263,7 @@ fn string_errors() {
tokenize_error(r#""bad \x esc""#),
Spanning::zero_width(
&SourcePosition::new(6, 0, 6),
LexerError::UnknownEscapeSequence("\\x".to_owned())
LexerError::UnknownEscapeSequence("\\x".into()),
)
);
@ -271,7 +271,7 @@ fn string_errors() {
tokenize_error(r#""bad \u1 esc""#),
Spanning::zero_width(
&SourcePosition::new(6, 0, 6),
LexerError::UnknownEscapeSequence("\\u1".to_owned())
LexerError::UnknownEscapeSequence("\\u1".into()),
)
);
@ -279,7 +279,7 @@ fn string_errors() {
tokenize_error(r#""bad \u0XX1 esc""#),
Spanning::zero_width(
&SourcePosition::new(6, 0, 6),
LexerError::UnknownEscapeSequence("\\u0XX1".to_owned())
LexerError::UnknownEscapeSequence("\\u0XX1".into()),
)
);
@ -287,7 +287,7 @@ fn string_errors() {
tokenize_error(r#""bad \uXXXX esc""#),
Spanning::zero_width(
&SourcePosition::new(6, 0, 6),
LexerError::UnknownEscapeSequence("\\uXXXX".to_owned())
LexerError::UnknownEscapeSequence("\\uXXXX".into()),
)
);
@ -295,7 +295,7 @@ fn string_errors() {
tokenize_error(r#""bad \uFXXX esc""#),
Spanning::zero_width(
&SourcePosition::new(6, 0, 6),
LexerError::UnknownEscapeSequence("\\uFXXX".to_owned())
LexerError::UnknownEscapeSequence("\\uFXXX".into()),
)
);
@ -303,7 +303,7 @@ fn string_errors() {
tokenize_error(r#""bad \uXXXF esc""#),
Spanning::zero_width(
&SourcePosition::new(6, 0, 6),
LexerError::UnknownEscapeSequence("\\uXXXF".to_owned())
LexerError::UnknownEscapeSequence("\\uXXXF".into()),
)
);
@ -349,9 +349,7 @@ fn numbers() {
Token::Scalar(ScalarToken::Float(actual)) => {
assert!(
expected == actual,
"[expected] {} != {} [actual]",
expected,
actual
"[expected] {expected} != {actual} [actual]",
);
}
_ => assert!(false),
@ -662,39 +660,32 @@ fn punctuation_error() {
#[test]
fn display() {
assert_eq!(format!("{}", Token::Name("identifier")), "identifier");
assert_eq!(format!("{}", Token::Scalar(ScalarToken::Int("123"))), "123");
assert_eq!(
format!("{}", Token::Scalar(ScalarToken::Float("4.5"))),
"4.5"
);
assert_eq!(
format!("{}", Token::Scalar(ScalarToken::String("some string"))),
"\"some string\""
);
assert_eq!(
format!(
"{}",
Token::Scalar(ScalarToken::String("string with \\ escape and \" quote"))
for (input, expected) in [
(Token::Name("identifier"), "identifier"),
(Token::Scalar(ScalarToken::Int("123")), "123"),
(Token::Scalar(ScalarToken::Float("4.5")), "4.5"),
(
Token::Scalar(ScalarToken::String("some string")),
"\"some string\"",
),
"\"string with \\\\ escape and \\\" quote\""
);
assert_eq!(format!("{}", Token::ExclamationMark), "!");
assert_eq!(format!("{}", Token::Dollar), "$");
assert_eq!(format!("{}", Token::ParenOpen), "(");
assert_eq!(format!("{}", Token::ParenClose), ")");
assert_eq!(format!("{}", Token::BracketOpen), "[");
assert_eq!(format!("{}", Token::BracketClose), "]");
assert_eq!(format!("{}", Token::CurlyOpen), "{");
assert_eq!(format!("{}", Token::CurlyClose), "}");
assert_eq!(format!("{}", Token::Ellipsis), "...");
assert_eq!(format!("{}", Token::Colon), ":");
assert_eq!(format!("{}", Token::Equals), "=");
assert_eq!(format!("{}", Token::At), "@");
assert_eq!(format!("{}", Token::Pipe), "|");
(
Token::Scalar(ScalarToken::String("string with \\ escape and \" quote")),
"\"string with \\\\ escape and \\\" quote\"",
),
(Token::ExclamationMark, "!"),
(Token::Dollar, "$"),
(Token::ParenOpen, "("),
(Token::ParenClose, ")"),
(Token::BracketOpen, "["),
(Token::BracketClose, "]"),
(Token::CurlyOpen, "{"),
(Token::CurlyClose, "}"),
(Token::Ellipsis, "..."),
(Token::Colon, ":"),
(Token::Equals, "="),
(Token::At, "@"),
(Token::Pipe, "|"),
] {
assert_eq!(input.to_string(), expected);
}
}

View file

@ -65,11 +65,11 @@ where
S: ScalarValue,
{
let mut lexer = Lexer::new(s);
let mut parser = Parser::new(&mut lexer).expect(&format!("Lexer error on input {:#?}", s));
let mut parser = Parser::new(&mut lexer).expect(&format!("Lexer error on input {s:#?}"));
let schema = SchemaType::new::<Query, EmptyMutation<()>, EmptySubscription<()>>(&(), &(), &());
parse_value_literal(&mut parser, false, &schema, Some(meta))
.expect(&format!("Parse error on input {:#?}", s))
.expect(&format!("Parse error on input {s:#?}"))
}
#[test]

View file

@ -81,7 +81,7 @@ impl<T> Spanning<T> {
}
}
/// Modify the contents of the spanned item
/// Modify the contents of the spanned item.
pub fn map<O, F: Fn(T) -> O>(self, f: F) -> Spanning<O> {
Spanning {
item: f(self.item),
@ -89,6 +89,13 @@ impl<T> Spanning<T> {
end: self.end,
}
}
/// Modifies the contents of the spanned item in case `f` returns [`Some`],
/// or returns [`None`] otherwise.
pub fn and_then<O, F: Fn(T) -> Option<O>>(self, f: F) -> Option<Spanning<O>> {
let (start, end) = (self.start, self.end);
f(self.item).map(|item| Spanning { item, start, end })
}
}
impl<T: fmt::Display> fmt::Display for Spanning<T> {
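
The new `Spanning::and_then` can be exercised roughly like this (illustrative only; it assumes the `Spanning::start_end` and `SourcePosition::new` constructors used in the tests above are callable):

use juniper::parser::{SourcePosition, Spanning};

fn demo() {
    let spanned = Spanning::start_end(
        &SourcePosition::new(0, 0, 0),
        &SourcePosition::new(2, 0, 2),
        "42",
    );
    // A `Some` from the closure keeps the original span; `None` drops the value.
    let parsed = spanned.and_then(|s| s.parse::<i32>().ok());
    assert_eq!(parsed.map(|s| s.item), Some(42));
}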

View file

@ -9,12 +9,12 @@ use crate::{
value::ScalarValue,
};
pub fn parse_value_literal<'a, 'b, S>(
parser: &mut Parser<'a>,
pub fn parse_value_literal<'b, S>(
parser: &mut Parser<'_>,
is_const: bool,
schema: &'b SchemaType<'b, S>,
tpe: Option<&MetaType<'b, S>>,
) -> ParseResult<'a, InputValue<S>>
) -> ParseResult<InputValue<S>>
where
S: ScalarValue,
{
@ -113,16 +113,16 @@ where
},
_,
) => Ok(parser.next_token()?.map(|_| InputValue::enum_value(name))),
_ => Err(parser.next_token()?.map(ParseError::UnexpectedToken)),
_ => Err(parser.next_token()?.map(ParseError::unexpected_token)),
}
}
fn parse_list_literal<'a, 'b, S>(
parser: &mut Parser<'a>,
fn parse_list_literal<'b, S>(
parser: &mut Parser<'_>,
is_const: bool,
schema: &'b SchemaType<'b, S>,
tpe: Option<&MetaType<'b, S>>,
) -> ParseResult<'a, InputValue<S>>
) -> ParseResult<InputValue<S>>
where
S: ScalarValue,
{
@ -135,12 +135,12 @@ where
.map(InputValue::parsed_list))
}
fn parse_object_literal<'a, 'b, S>(
parser: &mut Parser<'a>,
fn parse_object_literal<'b, S>(
parser: &mut Parser<'_>,
is_const: bool,
schema: &'b SchemaType<'b, S>,
object_tpe: Option<&InputObjectMeta<'b, S>>,
) -> ParseResult<'a, InputValue<S>>
) -> ParseResult<InputValue<S>>
where
S: ScalarValue,
{
@ -153,12 +153,12 @@ where
.map(|items| InputValue::parsed_object(items.into_iter().map(|s| s.item).collect())))
}
fn parse_object_field<'a, 'b, S>(
parser: &mut Parser<'a>,
fn parse_object_field<'b, S>(
parser: &mut Parser<'_>,
is_const: bool,
schema: &'b SchemaType<'b, S>,
object_meta: Option<&InputObjectMeta<'b, S>>,
) -> ParseResult<'a, (Spanning<String>, Spanning<InputValue<S>>)>
) -> ParseResult<(Spanning<String>, Spanning<InputValue<S>>)>
where
S: ScalarValue,
{
@ -179,7 +179,7 @@ where
))
}
fn parse_variable_literal<'a, S>(parser: &mut Parser<'a>) -> ParseResult<'a, InputValue<S>>
fn parse_variable_literal<S>(parser: &mut Parser<'_>) -> ParseResult<InputValue<S>>
where
S: ScalarValue,
{
@ -199,12 +199,12 @@ where
))
}
fn parse_scalar_literal_by_infered_type<'a, 'b, S>(
token: ScalarToken<'a>,
fn parse_scalar_literal_by_infered_type<'b, S>(
token: ScalarToken<'_>,
start: &SourcePosition,
end: &SourcePosition,
schema: &'b SchemaType<'b, S>,
) -> ParseResult<'a, InputValue<S>>
) -> ParseResult<InputValue<S>>
where
S: ScalarValue,
{

View file

@ -16,7 +16,7 @@ use crate::{
};
/// Whether an item is deprecated, with context.
#[derive(Debug, PartialEq, Hash, Clone)]
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub enum DeprecationStatus {
/// The field/variant is not deprecated.
Current,
@ -58,7 +58,7 @@ pub struct ScalarMeta<'a, S> {
pub type InputValueParseFn<S> = for<'b> fn(&'b InputValue<S>) -> Result<(), FieldError<S>>;
/// Shortcut for a [`ScalarToken`] parsing function.
pub type ScalarTokenParseFn<S> = for<'b> fn(ScalarToken<'b>) -> Result<S, ParseError<'b>>;
pub type ScalarTokenParseFn<S> = for<'b> fn(ScalarToken<'b>) -> Result<S, ParseError>;
/// List type metadata
#[derive(Debug)]
@ -110,6 +110,8 @@ pub struct InterfaceMeta<'a, S> {
pub description: Option<String>,
#[doc(hidden)]
pub fields: Vec<Field<'a, S>>,
#[doc(hidden)]
pub interface_names: Vec<String>,
}
/// Union type metadata
@ -403,7 +405,7 @@ impl<'a, S> MetaType<'a, S> {
// "used exclusively by GraphQLs introspection system"
{
name.starts_with("__") ||
// <https://facebook.github.io/graphql/draft/#sec-Scalars>
// https://spec.graphql.org/October2021#sec-Scalars
name == "Boolean" || name == "String" || name == "Int" || name == "Float" || name == "ID" ||
// Our custom empty markers
name == "_EmptyMutation" || name == "_EmptySubscription"
@ -453,7 +455,7 @@ impl<'a, S> ScalarMeta<'a, S> {
/// Overwrites any previously set description.
#[must_use]
pub fn description(mut self, description: &str) -> Self {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
@ -523,7 +525,7 @@ impl<'a, S> ObjectMeta<'a, S> {
/// Overwrites any previously set description.
#[must_use]
pub fn description(mut self, description: &str) -> Self {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
@ -534,7 +536,7 @@ impl<'a, S> ObjectMeta<'a, S> {
pub fn interfaces(mut self, interfaces: &[Type<'a>]) -> Self {
self.interface_names = interfaces
.iter()
.map(|t| t.innermost_name().to_owned())
.map(|t| t.innermost_name().into())
.collect();
self
}
@ -566,7 +568,7 @@ impl<'a, S> EnumMeta<'a, S> {
/// Overwrites any previously set description.
#[must_use]
pub fn description(mut self, description: &str) -> Self {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
@ -587,6 +589,7 @@ impl<'a, S> InterfaceMeta<'a, S> {
name,
description: None,
fields: fields.to_vec(),
interface_names: Vec::new(),
}
}
@ -595,7 +598,19 @@ impl<'a, S> InterfaceMeta<'a, S> {
/// Overwrites any previously set description.
#[must_use]
pub fn description(mut self, description: &str) -> Self {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
/// Sets the `interfaces` this [`InterfaceMeta`] interface implements.
///
/// Overwrites any previously set list of interfaces.
#[must_use]
pub fn interfaces(mut self, interfaces: &[Type<'a>]) -> Self {
self.interface_names = interfaces
.iter()
.map(|t| t.innermost_name().into())
.collect();
self
}
@ -612,10 +627,7 @@ impl<'a> UnionMeta<'a> {
Self {
name,
description: None,
of_type_names: of_types
.iter()
.map(|t| t.innermost_name().to_owned())
.collect(),
of_type_names: of_types.iter().map(|t| t.innermost_name().into()).collect(),
}
}
@ -624,7 +636,7 @@ impl<'a> UnionMeta<'a> {
/// Overwrites any previously set description.
#[must_use]
pub fn description(mut self, description: &str) -> Self {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
@ -656,7 +668,7 @@ impl<'a, S> InputObjectMeta<'a, S> {
/// Overwrites any previously set description.
#[must_use]
pub fn description(mut self, description: &str) -> Self {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
@ -672,7 +684,7 @@ impl<'a, S> Field<'a, S> {
/// Overwrites any previously set description.
#[must_use]
pub fn description(mut self, description: &str) -> Self {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
@ -697,7 +709,7 @@ impl<'a, S> Field<'a, S> {
/// Overwrites any previously set deprecation reason.
#[must_use]
pub fn deprecated(mut self, reason: Option<&str>) -> Self {
self.deprecation_status = DeprecationStatus::Deprecated(reason.map(ToOwned::to_owned));
self.deprecation_status = DeprecationStatus::Deprecated(reason.map(Into::into));
self
}
}
@ -706,7 +718,7 @@ impl<'a, S> Argument<'a, S> {
/// Builds a new [`Argument`] of the given [`Type`] with the given `name`.
pub fn new(name: &str, arg_type: Type<'a>) -> Self {
Self {
name: name.to_owned(),
name: name.into(),
description: None,
arg_type,
default_value: None,
@ -718,7 +730,7 @@ impl<'a, S> Argument<'a, S> {
/// Overwrites any previously set description.
#[must_use]
pub fn description(mut self, description: &str) -> Self {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
@ -736,7 +748,7 @@ impl EnumValue {
/// Constructs a new [`EnumValue`] with the provided `name`.
pub fn new(name: &str) -> Self {
Self {
name: name.to_owned(),
name: name.into(),
description: None,
deprecation_status: DeprecationStatus::Current,
}
@ -747,7 +759,7 @@ impl EnumValue {
/// Overwrites any previously set description.
#[must_use]
pub fn description(mut self, description: &str) -> Self {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
@ -756,7 +768,7 @@ impl EnumValue {
/// Overwrites any previously set deprecation reason.
#[must_use]
pub fn deprecated(mut self, reason: Option<&str>) -> Self {
self.deprecation_status = DeprecationStatus::Deprecated(reason.map(ToOwned::to_owned));
self.deprecation_status = DeprecationStatus::Deprecated(reason.map(Into::into));
self
}
}

View file

@ -167,8 +167,7 @@ where
/// [GraphQL Schema Language](https://graphql.org/learn/schema/#type-language)
/// format.
pub fn as_schema_language(&self) -> String {
let doc = self.as_parser_document();
format!("{}", doc)
self.as_parser_document().to_string()
}
#[cfg(feature = "graphql-parser")]
@ -210,17 +209,14 @@ impl<'a, S> SchemaType<'a, S> {
registry.get_type::<SchemaType<S>>(&());
directives.insert("skip".to_owned(), DirectiveType::new_skip(&mut registry));
directives.insert("skip".into(), DirectiveType::new_skip(&mut registry));
directives.insert("include".into(), DirectiveType::new_include(&mut registry));
directives.insert(
"include".to_owned(),
DirectiveType::new_include(&mut registry),
);
directives.insert(
"deprecated".to_owned(),
"deprecated".into(),
DirectiveType::new_deprecated(&mut registry),
);
directives.insert(
"specifiedBy".to_owned(),
"specifiedBy".into(),
DirectiveType::new_specified_by(&mut registry),
);
@ -243,7 +239,7 @@ impl<'a, S> SchemaType<'a, S> {
for meta_type in registry.types.values() {
if let MetaType::Placeholder(PlaceholderMeta { ref of_type }) = *meta_type {
panic!("Type {:?} is still a placeholder type", of_type);
panic!("Type {of_type:?} is still a placeholder type");
}
}
SchemaType {
@ -508,9 +504,9 @@ where
locations: &[DirectiveLocation],
arguments: &[Argument<'a, S>],
is_repeatable: bool,
) -> DirectiveType<'a, S> {
DirectiveType {
name: name.to_owned(),
) -> Self {
Self {
name: name.into(),
description: None,
locations: locations.to_vec(),
arguments: arguments.to_vec(),
@ -578,7 +574,7 @@ where
}
pub fn description(mut self, description: &str) -> DirectiveType<'a, S> {
self.description = Some(description.to_owned());
self.description = Some(description.into());
self
}
}
@ -605,8 +601,8 @@ impl<'a, S> fmt::Display for TypeType<'a, S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Self::Concrete(t) => f.write_str(t.name().unwrap()),
Self::List(i, _) => write!(f, "[{}]", i),
Self::NonNull(i) => write!(f, "{}!", i),
Self::List(i, _) => write!(f, "[{i}]"),
Self::NonNull(i) => write!(f, "{i}!"),
}
}
}
@ -644,10 +640,7 @@ mod test {
"#,
)
.unwrap();
assert_eq!(
format!("{}", ast),
format!("{}", schema.as_parser_document()),
);
assert_eq!(ast.to_string(), schema.as_parser_document().to_string());
}
}
@ -691,10 +684,10 @@ mod test {
}
/// This is whatever's description.
fn whatever() -> String {
"foo".to_string()
"foo".into()
}
fn arr(stuff: Vec<Coordinate>) -> Option<&'static str> {
(!stuff.is_empty()).then(|| "stuff")
(!stuff.is_empty()).then_some("stuff")
}
fn fruit() -> Fruit {
Fruit::Apple
@ -754,7 +747,7 @@ mod test {
"#,
)
.unwrap();
assert_eq!(format!("{}", ast), schema.as_schema_language());
assert_eq!(ast.to_string(), schema.as_schema_language());
}
}
}
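
For the `then_some` rewrite above, a quick sketch (not part of the diff) showing that the eager `bool::then_some` (stable since Rust 1.62) and the lazy `bool::then` agree here:

fn demo() {
    let stuff = vec![1];
    assert_eq!((!stuff.is_empty()).then_some("stuff"), Some("stuff"));
    assert_eq!((!stuff.is_empty()).then(|| "stuff"), Some("stuff"));

    let empty: Vec<i32> = vec![];
    assert_eq!((!empty.is_empty()).then_some("stuff"), None);
}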

View file

@ -211,13 +211,19 @@ impl<'a, S: ScalarValue + 'a> TypeType<'a, S> {
}
}
fn fields(&self, #[graphql(default)] include_deprecated: bool) -> Option<Vec<&Field<S>>> {
fn fields(
&self,
#[graphql(default = false)] include_deprecated: Option<bool>,
) -> Option<Vec<&Field<S>>> {
match self {
TypeType::Concrete(&MetaType::Interface(InterfaceMeta { ref fields, .. }))
| TypeType::Concrete(&MetaType::Object(ObjectMeta { ref fields, .. })) => Some(
fields
.iter()
.filter(|f| include_deprecated || !f.deprecation_status.is_deprecated())
.filter(|f| {
include_deprecated.unwrap_or_default()
|| !f.deprecation_status.is_deprecated()
})
.filter(|f| !f.name.starts_with("__"))
.collect(),
),
@ -228,7 +234,7 @@ impl<'a, S: ScalarValue + 'a> TypeType<'a, S> {
fn of_type(&self) -> Option<&TypeType<S>> {
match self {
TypeType::Concrete(_) => None,
TypeType::List(l, _) | TypeType::NonNull(l) => Some(&*l),
TypeType::List(l, _) | TypeType::NonNull(l) => Some(&**l),
}
}
@ -244,10 +250,16 @@ impl<'a, S: ScalarValue + 'a> TypeType<'a, S> {
fn interfaces<'s>(&self, context: &'s SchemaType<'a, S>) -> Option<Vec<TypeType<'s, S>>> {
match self {
TypeType::Concrete(&MetaType::Object(ObjectMeta {
ref interface_names,
..
})) => Some(
TypeType::Concrete(
&MetaType::Object(ObjectMeta {
ref interface_names,
..
})
| &MetaType::Interface(InterfaceMeta {
ref interface_names,
..
}),
) => Some(
interface_names
.iter()
.filter_map(|n| context.type_by_name(n))
@ -276,16 +288,16 @@ impl<'a, S: ScalarValue + 'a> TypeType<'a, S> {
.iter()
.filter_map(|&ct| {
if let MetaType::Object(ObjectMeta {
ref name,
ref interface_names,
name,
interface_names,
..
}) = *ct
}) = ct
{
if interface_names.contains(&iface_name.to_string()) {
context.type_by_name(name)
} else {
None
}
interface_names
.iter()
.any(|name| name == iface_name)
.then(|| context.type_by_name(name))
.flatten()
} else {
None
}
@ -296,12 +308,18 @@ impl<'a, S: ScalarValue + 'a> TypeType<'a, S> {
}
}
fn enum_values(&self, #[graphql(default)] include_deprecated: bool) -> Option<Vec<&EnumValue>> {
fn enum_values(
&self,
#[graphql(default = false)] include_deprecated: Option<bool>,
) -> Option<Vec<&EnumValue>> {
match self {
TypeType::Concrete(&MetaType::Enum(EnumMeta { ref values, .. })) => Some(
values
.iter()
.filter(|f| include_deprecated || !f.deprecation_status.is_deprecated())
.filter(|f| {
include_deprecated.unwrap_or_default()
|| !f.deprecation_status.is_deprecated()
})
.collect(),
),
_ => None,
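
A small sketch (not part of the diff) of the behavior behind the `Option<bool>` arguments above: an explicit `null` reaches the resolver as `None`, and `unwrap_or_default()` folds it into the declared `false` default:

fn demo() {
    let defaulted: Option<bool> = Some(false); // argument omitted => default applied
    let explicit_null: Option<bool> = None;    // argument passed as `null`
    assert!(!defaulted.unwrap_or_default());
    assert!(!explicit_null.unwrap_or_default());
    assert!(Some(true).unwrap_or_default());
}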

View file

@ -190,8 +190,11 @@ impl GraphQLParserTranslator {
position: Pos::default(),
description: x.description.as_ref().map(|s| From::from(s.as_str())),
name: From::from(x.name.as_ref()),
// TODO: Support this with GraphQL October 2021 Edition.
implements_interfaces: vec![],
implements_interfaces: x
.interface_names
.iter()
.map(|s| From::from(s.as_str()))
.collect(),
directives: vec![],
fields: x
.fields
@ -282,22 +285,18 @@ where
DeprecationStatus::Current => None,
DeprecationStatus::Deprecated(reason) => Some(ExternalDirective {
position: Pos::default(),
name: From::from("deprecated"),
arguments: if let Some(reason) = reason {
vec![(
From::from("reason"),
ExternalValue::String(reason.to_string()),
)]
} else {
vec![]
},
name: "deprecated".into(),
arguments: reason
.as_ref()
.map(|rsn| vec![(From::from("reason"), ExternalValue::String(rsn.into()))])
.unwrap_or_default(),
}),
}
}
// Right now the only directive supported is `@deprecated`. `@skip` and `@include`
// are dealt with elsewhere.
// <https://facebook.github.io/graphql/draft/#sec-Type-System.Directives>
// Right now the only directive supported is `@deprecated`.
// `@skip` and `@include` are dealt with elsewhere.
// https://spec.graphql.org/October2021#sec-Type-System.Directives.Built-in-Directives
fn generate_directives<'a, T>(status: &DeprecationStatus) -> Vec<ExternalDirective<'a, T>>
where
T: Text<'a>,

View file

@ -93,12 +93,12 @@ impl Human {
home_planet: Option<&str>,
) -> Self {
Self {
id: id.to_owned(),
name: name.to_owned(),
friend_ids: friend_ids.iter().copied().map(ToOwned::to_owned).collect(),
id: id.into(),
name: name.into(),
friend_ids: friend_ids.iter().copied().map(Into::into).collect(),
appears_in: appears_in.to_vec(),
secret_backstory: secret_backstory.map(ToOwned::to_owned),
home_planet: home_planet.map(|p| p.to_owned()),
secret_backstory: secret_backstory.map(Into::into),
home_planet: home_planet.map(Into::into),
}
}
}
@ -153,12 +153,12 @@ impl Droid {
primary_function: Option<&str>,
) -> Self {
Self {
id: id.to_owned(),
name: name.to_owned(),
friend_ids: friend_ids.iter().copied().map(ToOwned::to_owned).collect(),
id: id.into(),
name: name.into(),
friend_ids: friend_ids.iter().copied().map(Into::into).collect(),
appears_in: appears_in.to_vec(),
secret_backstory: secret_backstory.map(ToOwned::to_owned),
primary_function: primary_function.map(ToOwned::to_owned),
secret_backstory: secret_backstory.map(Into::into),
primary_function: primary_function.map(Into::into),
}
}
}
@ -192,7 +192,7 @@ impl Droid {
}
}
#[derive(Default, Clone)]
#[derive(Clone, Default)]
pub struct Database {
humans: HashMap<String, Human>,
droids: HashMap<String, Droid>,
@ -206,7 +206,7 @@ impl Database {
let mut droids = HashMap::new();
humans.insert(
"1000".to_owned(),
"1000".into(),
Human::new(
"1000",
"Luke Skywalker",
@ -218,7 +218,7 @@ impl Database {
);
humans.insert(
"1001".to_owned(),
"1001".into(),
Human::new(
"1001",
"Darth Vader",
@ -230,7 +230,7 @@ impl Database {
);
humans.insert(
"1002".to_owned(),
"1002".into(),
Human::new(
"1002",
"Han Solo",
@ -242,7 +242,7 @@ impl Database {
);
humans.insert(
"1003".to_owned(),
"1003".into(),
Human::new(
"1003",
"Leia Organa",
@ -254,7 +254,7 @@ impl Database {
);
humans.insert(
"1004".to_owned(),
"1004".into(),
Human::new(
"1004",
"Wilhuff Tarkin",
@ -266,7 +266,7 @@ impl Database {
);
droids.insert(
"2000".to_owned(),
"2000".into(),
Droid::new(
"2000",
"C-3PO",
@ -278,7 +278,7 @@ impl Database {
);
droids.insert(
"2001".to_owned(),
"2001".into(),
Droid::new(
"2001",
"R2-D2",

View file

@ -1173,7 +1173,7 @@ pub(crate) fn schema_introspection_result() -> Value {
}
],
"inputFields": null,
"interfaces": null,
"interfaces": [],
"enumValues": null,
"possibleTypes": [
{
@ -2500,7 +2500,7 @@ pub(crate) fn schema_introspection_result_without_descriptions() -> Value {
}
],
"inputFields": null,
"interfaces": null,
"interfaces": [],
"enumValues": null,
"possibleTypes": [
{

View file

@ -1,4 +1,4 @@
use std::{iter, iter::FromIterator as _, pin::Pin};
use std::{iter, pin::Pin};
use futures::{stream, StreamExt as _};
@ -48,9 +48,9 @@ impl MySubscription {
async fn async_human() -> HumanStream {
Box::pin(stream::once(async {
Human {
id: "stream id".to_string(),
name: "stream name".to_string(),
home_planet: "stream home planet".to_string(),
id: "stream id".into(),
name: "stream name".into(),
home_planet: "stream home planet".into(),
}
}))
}
@ -78,7 +78,7 @@ impl MySubscription {
Human {
id,
name,
home_planet: "default home planet".to_string(),
home_planet: "default home planet".into(),
}
}))
}
@ -154,10 +154,10 @@ fn returns_requested_object() {
id
name
}
}"#
.to_string();
}"#;
let (names, collected_values) = create_and_execute(query).expect("Got error from stream");
let (names, collected_values) =
create_and_execute(query.into()).expect("Got error from stream");
let mut iterator_count = 0;
let expected_values = vec![vec![Ok(Value::Object(Object::from_iter(
@ -182,10 +182,9 @@ fn returns_error() {
id
name
}
}"#
.to_string();
}"#;
let response = create_and_execute(query);
let response = create_and_execute(query.into());
assert!(response.is_err());
@ -206,10 +205,10 @@ fn can_access_context() {
humanWithContext {
id
}
}"#
.to_string();
}"#;
let (names, collected_values) = create_and_execute(query).expect("Got error from stream");
let (names, collected_values) =
create_and_execute(query.into()).expect("Got error from stream");
let mut iterator_count = 0;
let expected_values = vec![vec![Ok(Value::Object(Object::from_iter(iter::from_fn(
@ -234,10 +233,10 @@ fn resolves_typed_inline_fragments() {
id
}
}
}"#
.to_string();
}"#;
let (names, collected_values) = create_and_execute(query).expect("Got error from stream");
let (names, collected_values) =
create_and_execute(query.into()).expect("Got error from stream");
let mut iterator_count = 0;
let expected_values = vec![vec![Ok(Value::Object(Object::from_iter(iter::from_fn(
@ -262,10 +261,10 @@ fn resolves_nontyped_inline_fragments() {
id
}
}
}"#
.to_string();
}"#;
let (names, collected_values) = create_and_execute(query).expect("Got error from stream");
let (names, collected_values) =
create_and_execute(query.into()).expect("Got error from stream");
let mut iterator_count = 0;
let expected_values = vec![vec![Ok(Value::Object(Object::from_iter(iter::from_fn(
@ -289,10 +288,10 @@ fn can_access_arguments() {
id
name
}
}"#
.to_string();
}"#;
let (names, collected_values) = create_and_execute(query).expect("Got error from stream");
let (names, collected_values) =
create_and_execute(query.into()).expect("Got error from stream");
let mut iterator_count = 0;
let expected_values = vec![vec![Ok(Value::Object(Object::from_iter(iter::from_fn(
@ -317,10 +316,10 @@ fn type_alias() {
id
name
}
}"#
.to_string();
}"#;
let (names, collected_values) = create_and_execute(query).expect("Got error from stream");
let (names, collected_values) =
create_and_execute(query.into()).expect("Got error from stream");
let mut iterator_count = 0;
let expected_values = vec![vec![Ok(Value::Object(Object::from_iter(iter::from_fn(

View file

@ -75,15 +75,15 @@ fn test_node() {
baz
}"#;
let node_info = NodeTypeInfo {
name: "MyNode".to_string(),
attribute_names: vec!["foo".to_string(), "bar".to_string(), "baz".to_string()],
name: "MyNode".into(),
attribute_names: vec!["foo".into(), "bar".into(), "baz".into()],
};
let mut node = Node {
attributes: IndexMap::new(),
};
node.attributes.insert("foo".to_string(), "1".to_string());
node.attributes.insert("bar".to_string(), "2".to_string());
node.attributes.insert("baz".to_string(), "3".to_string());
node.attributes.insert("foo".into(), "1".into());
node.attributes.insert("bar".into(), "2".into());
node.attributes.insert("baz".into(), "3".into());
let schema: RootNode<_, _, _> = RootNode::new_with_info(
node,
EmptyMutation::new(),

View file

@ -1,3 +1,5 @@
use std::future;
use crate::{
ast::Selection,
executor::{ExecutionResult, Executor},
@ -30,7 +32,7 @@ where
///
/// The default implementation panics.
///
/// [3]: https://spec.graphql.org/June2018/#sec-Objects
/// [3]: https://spec.graphql.org/October2021#sec-Objects
fn resolve_field_async<'a>(
&'a self,
_info: &'a Self::TypeInfo,
@ -54,9 +56,9 @@ where
///
/// The default implementation panics.
///
/// [1]: https://spec.graphql.org/June2018/#sec-Interfaces
/// [2]: https://spec.graphql.org/June2018/#sec-Unions
/// [3]: https://spec.graphql.org/June2018/#sec-Objects
/// [1]: https://spec.graphql.org/October2021#sec-Interfaces
/// [2]: https://spec.graphql.org/October2021#sec-Unions
/// [3]: https://spec.graphql.org/October2021#sec-Objects
fn resolve_into_type_async<'a>(
&'a self,
info: &'a Self::TypeInfo,
@ -91,8 +93,8 @@ where
///
/// The default implementation panics, if `selection_set` is [`None`].
///
/// [0]: https://spec.graphql.org/June2018/#sec-Errors-and-Non-Nullability
/// [3]: https://spec.graphql.org/June2018/#sec-Objects
/// [0]: https://spec.graphql.org/October2021#sec-Handling-Field-Errors
/// [3]: https://spec.graphql.org/October2021#sec-Objects
fn resolve_async<'a>(
&'a self,
info: &'a Self::TypeInfo,
@ -226,7 +228,7 @@ where
panic!(
"Field {} not found on type {:?}",
f.name.item,
meta_type.name()
meta_type.name(),
)
});
@ -242,7 +244,9 @@ where
f.arguments.as_ref().map(|m| {
m.item
.iter()
.map(|&(ref k, ref v)| (k.item, v.item.clone().into_const(exec_vars)))
.filter_map(|&(ref k, ref v)| {
v.item.clone().into_const(exec_vars).map(|v| (k.item, v))
})
.collect()
}),
&meta_field.arguments,
@ -252,7 +256,7 @@ where
let is_non_null = meta_field.field_type.is_non_null();
let response_name = response_name.to_string();
async_values.push(AsyncValueFuture::Field(async move {
async_values.push_back(AsyncValueFuture::Field(async move {
// TODO: implement custom future type instead of
// two-level boxing.
let res = instance
@ -315,12 +319,12 @@ where
if let Ok(Value::Object(obj)) = sub_result {
for (k, v) in obj {
async_values.push(AsyncValueFuture::FragmentSpread(async move {
AsyncValue::Field(AsyncField {
async_values.push_back(AsyncValueFuture::FragmentSpread(
future::ready(AsyncValue::Field(AsyncField {
name: k,
value: Some(v),
})
}));
})),
));
}
} else if let Err(e) = sub_result {
sub_exec.push_error_at(e, *start_pos);
@ -360,19 +364,19 @@ where
if let Ok(Value::Object(obj)) = sub_result {
for (k, v) in obj {
async_values.push(AsyncValueFuture::InlineFragment1(async move {
AsyncValue::Field(AsyncField {
async_values.push_back(AsyncValueFuture::InlineFragment1(
future::ready(AsyncValue::Field(AsyncField {
name: k,
value: Some(v),
})
}));
})),
));
}
} else if let Err(e) = sub_result {
sub_exec.push_error_at(e, *start_pos);
}
}
} else {
async_values.push(AsyncValueFuture::InlineFragment2(async move {
async_values.push_back(AsyncValueFuture::InlineFragment2(async move {
let value = resolve_selection_set_into_async(
instance,
info,

View file

@ -49,7 +49,6 @@ pub enum TypeKind {
/// ## Input objects
///
/// Represents complex values provided in queries _into_ the system.
#[graphql(name = "INPUT_OBJECT")]
InputObject,
/// ## List types
@ -63,7 +62,6 @@ pub enum TypeKind {
///
/// In GraphQL, nullable types are the default. By putting a `!` after a
/// type, it becomes non-nullable.
#[graphql(name = "NON_NULL")]
NonNull,
}
@ -89,7 +87,7 @@ impl<'a, S> Arguments<'a, S> {
if let (Some(args), Some(meta_args)) = (&mut args, meta_args) {
for arg in meta_args {
let arg_name = arg.name.as_str();
if args.get(arg_name).map_or(true, InputValue::is_null) {
if args.get(arg_name).is_none() {
if let Some(val) = arg.default_value.as_ref() {
args.insert(arg_name, val.clone());
}
@ -150,14 +148,14 @@ impl<'a, S> Arguments<'a, S> {
/// This trait is intended to be used in conjunction with the [`GraphQLType`] trait. See the example
/// in the documentation of a [`GraphQLType`] trait.
///
/// [1]: https://spec.graphql.org/June2018/#sec-Interfaces
/// [2]: https://spec.graphql.org/June2018/#sec-Unions
/// [3]: https://spec.graphql.org/June2018/#sec-Objects
/// [4]: https://spec.graphql.org/June2018/#sec-Scalars
/// [5]: https://spec.graphql.org/June2018/#sec-Enums
/// [6]: https://spec.graphql.org/June2018/#sec-Type-System.List
/// [7]: https://spec.graphql.org/June2018/#sec-Type-System.Non-Null
/// [8]: https://spec.graphql.org/June2018/#sec-Input-Objects
/// [1]: https://spec.graphql.org/October2021#sec-Interfaces
/// [2]: https://spec.graphql.org/October2021#sec-Unions
/// [3]: https://spec.graphql.org/October2021#sec-Objects
/// [4]: https://spec.graphql.org/October2021#sec-Scalars
/// [5]: https://spec.graphql.org/October2021#sec-Enums
/// [6]: https://spec.graphql.org/October2021#sec-List
/// [7]: https://spec.graphql.org/October2021#sec-Non-Null
/// [8]: https://spec.graphql.org/October2021#sec-Input-Objects
/// [11]: https://doc.rust-lang.org/reference/items/traits.html#object-safety
/// [12]: https://doc.rust-lang.org/reference/types/trait-object.html
pub trait GraphQLValue<S = DefaultScalarValue>
@ -196,7 +194,7 @@ where
///
/// The default implementation panics.
///
/// [3]: https://spec.graphql.org/June2018/#sec-Objects
/// [3]: https://spec.graphql.org/October2021#sec-Objects
fn resolve_field(
&self,
_info: &Self::TypeInfo,
@ -217,9 +215,9 @@ where
///
/// The default implementation panics.
///
/// [1]: https://spec.graphql.org/June2018/#sec-Interfaces
/// [2]: https://spec.graphql.org/June2018/#sec-Unions
/// [3]: https://spec.graphql.org/June2018/#sec-Objects
/// [1]: https://spec.graphql.org/October2021#sec-Interfaces
/// [2]: https://spec.graphql.org/October2021#sec-Unions
/// [3]: https://spec.graphql.org/October2021#sec-Objects
fn resolve_into_type(
&self,
info: &Self::TypeInfo,
@ -243,9 +241,9 @@ where
///
/// The default implementation panics.
///
/// [1]: https://spec.graphql.org/June2018/#sec-Interfaces
/// [2]: https://spec.graphql.org/June2018/#sec-Unions
/// [3]: https://spec.graphql.org/June2018/#sec-Objects
/// [1]: https://spec.graphql.org/October2021#sec-Interfaces
/// [2]: https://spec.graphql.org/October2021#sec-Unions
/// [3]: https://spec.graphql.org/October2021#sec-Objects
#[allow(unused_variables)]
fn concrete_type_name(&self, context: &Self::Context, info: &Self::TypeInfo) -> String {
panic!(
@ -271,8 +269,8 @@ where
///
/// The default implementation panics, if `selection_set` is [`None`].
///
/// [0]: https://spec.graphql.org/June2018/#sec-Errors-and-Non-Nullability
/// [3]: https://spec.graphql.org/June2018/#sec-Objects
/// [0]: https://spec.graphql.org/October2021#sec-Errors-and-Non-Nullability
/// [3]: https://spec.graphql.org/October2021#sec-Objects
fn resolve(
&self,
info: &Self::TypeInfo,
@ -379,13 +377,13 @@ where
/// // schema in `meta()` above, or a validation failed because of a this library bug.
/// //
/// // In either of those two cases, the only reasonable way out is to panic the thread.
/// _ => panic!("Field {} not found on type User", field_name),
/// _ => panic!("Field {field_name} not found on type User"),
/// }
/// }
/// }
/// ```
///
/// [3]: https://spec.graphql.org/June2018/#sec-Objects
/// [3]: https://spec.graphql.org/October2021#sec-Objects
pub trait GraphQLType<S = DefaultScalarValue>: GraphQLValue<S>
where
S: ScalarValue,
@ -454,7 +452,7 @@ where
panic!(
"Field {} not found on type {:?}",
f.name.item,
meta_type.name()
meta_type.name(),
)
});
@ -474,8 +472,8 @@ where
f.arguments.as_ref().map(|m| {
m.item
.iter()
.map(|&(ref k, ref v)| {
(k.item, v.item.clone().into_const(exec_vars))
.filter_map(|&(ref k, ref v)| {
v.item.clone().into_const(exec_vars).map(|v| (k.item, v))
})
.collect()
}),
@ -608,7 +606,7 @@ where
.arguments
.iter()
.flat_map(|m| m.item.get("if"))
.flat_map(|v| v.item.clone().into_const(vars).convert())
.filter_map(|v| v.item.clone().into_const(vars)?.convert().ok())
.next()
.unwrap();
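
The `map` -> `filter_map` change above skips arguments whose variables fail to resolve instead of collecting them; the same pattern in miniature (illustrative, plain `std` only):

fn demo() {
    let raw = [("a", Some(1)), ("b", None), ("c", Some(3))];
    let resolved: Vec<(&str, i32)> = raw
        .iter()
        .filter_map(|&(k, v)| v.map(|v| (k, v)))
        .collect();
    // The pair with the unresolved value is simply skipped.
    assert_eq!(resolved, vec![("a", 1), ("c", 3)]);
}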

View file

@ -580,12 +580,11 @@ where
fn into_field_error(self) -> FieldError<S> {
const ERROR_PREFIX: &str = "Failed to convert into exact-size array";
match self {
Self::Null => format!("{}: Value cannot be `null`", ERROR_PREFIX).into(),
Self::WrongCount { actual, expected } => format!(
"{}: wrong elements count: {} instead of {}",
ERROR_PREFIX, actual, expected
)
.into(),
Self::Null => format!("{ERROR_PREFIX}: Value cannot be `null`").into(),
Self::WrongCount { actual, expected } => {
format!("{ERROR_PREFIX}: wrong elements count: {actual} instead of {expected}",)
.into()
}
Self::Item(s) => s.into_field_error(),
}
}

View file

@ -17,23 +17,23 @@ use crate::{GraphQLType, ScalarValue};
/// [GraphQL objects][1]. Other types ([scalars][2], [enums][3], [interfaces][4], [input objects][5]
/// and [unions][6]) are not allowed.
///
/// [1]: https://spec.graphql.org/June2018/#sec-Objects
/// [2]: https://spec.graphql.org/June2018/#sec-Scalars
/// [3]: https://spec.graphql.org/June2018/#sec-Enums
/// [4]: https://spec.graphql.org/June2018/#sec-Interfaces
/// [5]: https://spec.graphql.org/June2018/#sec-Input-Objects
/// [6]: https://spec.graphql.org/June2018/#sec-Unions
/// [1]: https://spec.graphql.org/October2021#sec-Objects
/// [2]: https://spec.graphql.org/October2021#sec-Scalars
/// [3]: https://spec.graphql.org/October2021#sec-Enums
/// [4]: https://spec.graphql.org/October2021#sec-Interfaces
/// [5]: https://spec.graphql.org/October2021#sec-Input-Objects
/// [6]: https://spec.graphql.org/October2021#sec-Unions
pub trait GraphQLObject<S: ScalarValue>: GraphQLType<S> {
/// An arbitrary function without meaning.
///
/// May contain compile-time check logic which ensures that types are used correctly according
/// to the [GraphQL specification][1].
///
/// [1]: https://spec.graphql.org/June2018/
/// [1]: https://spec.graphql.org/October2021
fn mark() {}
}
impl<'a, S, T> GraphQLObject<S> for &T
impl<S, T> GraphQLObject<S> for &T
where
T: GraphQLObject<S> + ?Sized,
S: ScalarValue,
@ -74,23 +74,23 @@ where
/// [GraphQL interfaces][1]. Other types ([scalars][2], [enums][3], [objects][4], [input objects][5]
/// and [unions][6]) are not allowed.
///
/// [1]: https://spec.graphql.org/June2018/#sec-Interfaces
/// [2]: https://spec.graphql.org/June2018/#sec-Scalars
/// [3]: https://spec.graphql.org/June2018/#sec-Enums
/// [4]: https://spec.graphql.org/June2018/#sec-Objects
/// [5]: https://spec.graphql.org/June2018/#sec-Input-Objects
/// [6]: https://spec.graphql.org/June2018/#sec-Unions
/// [1]: https://spec.graphql.org/October2021#sec-Interfaces
/// [2]: https://spec.graphql.org/October2021#sec-Scalars
/// [3]: https://spec.graphql.org/October2021#sec-Enums
/// [4]: https://spec.graphql.org/October2021#sec-Objects
/// [5]: https://spec.graphql.org/October2021#sec-Input-Objects
/// [6]: https://spec.graphql.org/October2021#sec-Unions
pub trait GraphQLInterface<S: ScalarValue>: GraphQLType<S> {
/// An arbitrary function without meaning.
///
/// May contain compile-time check logic which ensures that types are used correctly according
/// to the [GraphQL specification][1].
///
/// [1]: https://spec.graphql.org/June2018/
/// [1]: https://spec.graphql.org/October2021
fn mark() {}
}
impl<'a, S, T> GraphQLInterface<S> for &T
impl<S, T> GraphQLInterface<S> for &T
where
T: GraphQLInterface<S> + ?Sized,
S: ScalarValue,
@ -131,23 +131,23 @@ where
/// [GraphQL unions][1]. Other types ([scalars][2], [enums][3], [objects][4], [input objects][5] and
/// [interfaces][6]) are not allowed.
///
/// [1]: https://spec.graphql.org/June2018/#sec-Unions
/// [2]: https://spec.graphql.org/June2018/#sec-Scalars
/// [3]: https://spec.graphql.org/June2018/#sec-Enums
/// [4]: https://spec.graphql.org/June2018/#sec-Objects
/// [5]: https://spec.graphql.org/June2018/#sec-Input-Objects
/// [6]: https://spec.graphql.org/June2018/#sec-Interfaces
/// [1]: https://spec.graphql.org/October2021#sec-Unions
/// [2]: https://spec.graphql.org/October2021#sec-Scalars
/// [3]: https://spec.graphql.org/October2021#sec-Enums
/// [4]: https://spec.graphql.org/October2021#sec-Objects
/// [5]: https://spec.graphql.org/October2021#sec-Input-Objects
/// [6]: https://spec.graphql.org/October2021#sec-Interfaces
pub trait GraphQLUnion<S: ScalarValue>: GraphQLType<S> {
/// An arbitrary function without meaning.
///
/// May contain compile-time check logic which ensures that types are used correctly according
/// to the [GraphQL specification][1].
///
/// [1]: https://spec.graphql.org/June2018/
/// [1]: https://spec.graphql.org/October2021
fn mark() {}
}
impl<'a, S, T> GraphQLUnion<S> for &T
impl<S, T> GraphQLUnion<S> for &T
where
T: GraphQLUnion<S> + ?Sized,
S: ScalarValue,
@ -194,7 +194,7 @@ pub trait IsOutputType<S: ScalarValue>: GraphQLType<S> {
fn mark() {}
}
impl<'a, S, T> IsOutputType<S> for &T
impl<S, T> IsOutputType<S> for &T
where
T: IsOutputType<S> + ?Sized,
S: ScalarValue,
@ -282,7 +282,7 @@ where
}
}
impl<'a, S> IsOutputType<S> for str where S: ScalarValue {}
impl<S> IsOutputType<S> for str where S: ScalarValue {}
/// Marker trait for types which can be used as input types.
///
@ -298,7 +298,7 @@ pub trait IsInputType<S: ScalarValue>: GraphQLType<S> {
fn mark() {}
}
impl<'a, S, T> IsInputType<S> for &T
impl<S, T> IsInputType<S> for &T
where
T: IsInputType<S> + ?Sized,
S: ScalarValue,
@ -375,4 +375,4 @@ where
}
}
impl<'a, S> IsInputType<S> for str where S: ScalarValue {}
impl<S> IsInputType<S> for str where S: ScalarValue {}

View file

@ -50,11 +50,10 @@ impl FromStr for Name {
type Err = NameParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if Name::is_valid(s) {
Ok(Name(s.to_string()))
Ok(Name(s.into()))
} else {
Err(NameParseError(format!(
"Names must match /^[_a-zA-Z][_a-zA-Z0-9]*$/ but \"{}\" does not",
s
"Names must match /^[_a-zA-Z][_a-zA-Z0-9]*$/ but \"{s}\" does not",
)))
}
}

View file

@ -32,12 +32,16 @@ use crate::{
pub enum Nullable<T> {
/// No value
ImplicitNull,
/// No value, explicitly specified to be null
ExplicitNull,
/// Some value `T`
Some(T),
}
// Implemented manually to omit redundant `T: Default` trait bound, imposed by
// `#[derive(Default)]`.
impl<T> Default for Nullable<T> {
fn default() -> Self {
Self::ImplicitNull
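
A short sketch (not part of the diff) of the three states documented above, assuming the public `juniper::Nullable` type:

use juniper::Nullable;

fn describe(n: Nullable<i32>) -> &'static str {
    match n {
        Nullable::ImplicitNull => "argument omitted",
        Nullable::ExplicitNull => "argument explicitly set to null",
        Nullable::Some(_) => "argument set to a value",
    }
}

fn demo() {
    // The manual `Default` impl yields `ImplicitNull` without needing `i32: Default`.
    assert_eq!(describe(Nullable::default()), "argument omitted");
    assert_eq!(describe(Nullable::Some(7)), "argument set to a value");
}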

View file

@ -265,7 +265,7 @@ where
}
}
impl<'e, S, T> GraphQLValueAsync<S> for Arc<T>
impl<S, T> GraphQLValueAsync<S> for Arc<T>
where
T: GraphQLValueAsync<S> + Send + ?Sized,
T::TypeInfo: Sync,

View file

@ -37,7 +37,7 @@ impl ID {
.map(str::to_owned)
.or_else(|| v.as_int_value().as_ref().map(ToString::to_string))
.map(Self)
.ok_or_else(|| format!("Expected `String` or `Int`, found: {}", v))
.ok_or_else(|| format!("Expected `String` or `Int`, found: {v}"))
}
}
@ -81,10 +81,10 @@ mod impl_string_scalar {
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<String, String> {
v.as_string_value()
.map(str::to_owned)
.ok_or_else(|| format!("Expected `String`, found: {}", v))
.ok_or_else(|| format!("Expected `String`, found: {v}"))
}
pub(super) fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<'_, S> {
pub(super) fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<S> {
if let ScalarToken::String(value) = value {
let mut ret = String::with_capacity(value.len());
let mut char_iter = value.chars();
@ -120,7 +120,7 @@ mod impl_string_scalar {
}
Some(s) => {
return Err(ParseError::LexerError(LexerError::UnknownEscapeSequence(
format!("\\{}", s),
format!("\\{s}"),
)))
}
None => return Err(ParseError::LexerError(LexerError::UnterminatedString)),
@ -132,12 +132,12 @@ mod impl_string_scalar {
}
Ok(ret.into())
} else {
Err(ParseError::UnexpectedToken(Token::Scalar(value)))
Err(ParseError::unexpected_token(Token::Scalar(value)))
}
}
}
fn parse_unicode_codepoint<'a, I>(char_iter: &mut I) -> Result<char, ParseError<'a>>
fn parse_unicode_codepoint<I>(char_iter: &mut I) -> Result<char, ParseError>
where
I: Iterator<Item = char>,
{
@ -149,19 +149,16 @@ where
.and_then(|c1| {
char_iter
.next()
.map(|c2| format!("{}{}", c1, c2))
.map(|c2| format!("{c1}{c2}"))
.ok_or_else(|| {
ParseError::LexerError(LexerError::UnknownEscapeSequence(format!("\\u{}", c1)))
ParseError::LexerError(LexerError::UnknownEscapeSequence(format!("\\u{c1}")))
})
})
.and_then(|mut s| {
char_iter
.next()
.ok_or_else(|| {
ParseError::LexerError(LexerError::UnknownEscapeSequence(format!(
"\\u{}",
s.clone()
)))
ParseError::LexerError(LexerError::UnknownEscapeSequence(format!("\\u{s}")))
})
.map(|c2| {
s.push(c2);
@ -172,10 +169,7 @@ where
char_iter
.next()
.ok_or_else(|| {
ParseError::LexerError(LexerError::UnknownEscapeSequence(format!(
"\\u{}",
s.clone()
)))
ParseError::LexerError(LexerError::UnknownEscapeSequence(format!("\\u{s}")))
})
.map(|c2| {
s.push(c2);
@ -184,14 +178,12 @@ where
})?;
let code_point = u32::from_str_radix(&escaped_code_point, 16).map_err(|_| {
ParseError::LexerError(LexerError::UnknownEscapeSequence(format!(
"\\u{}",
escaped_code_point
"\\u{escaped_code_point}",
)))
})?;
char::from_u32(code_point).ok_or_else(|| {
ParseError::LexerError(LexerError::UnknownEscapeSequence(format!(
"\\u{}",
escaped_code_point
"\\u{escaped_code_point}",
)))
})
}
@ -282,12 +274,12 @@ mod impl_boolean_scalar {
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<Boolean, String> {
v.as_scalar_value()
.and_then(ScalarValue::as_bool)
.ok_or_else(|| format!("Expected `Boolean`, found: {}", v))
.ok_or_else(|| format!("Expected `Boolean`, found: {v}"))
}
pub(super) fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<'_, S> {
pub(super) fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<S> {
// `Boolean`s are parsed separately, so they shouldn't reach this code path.
Err(ParseError::UnexpectedToken(Token::Scalar(value)))
Err(ParseError::unexpected_token(Token::Scalar(value)))
}
}
@ -303,16 +295,16 @@ mod impl_int_scalar {
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<Int, String> {
v.as_int_value()
.ok_or_else(|| format!("Expected `Int`, found: {}", v))
.ok_or_else(|| format!("Expected `Int`, found: {v}"))
}
pub(super) fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<'_, S> {
pub(super) fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<S> {
if let ScalarToken::Int(v) = value {
v.parse()
.map_err(|_| ParseError::UnexpectedToken(Token::Scalar(value)))
.map_err(|_| ParseError::unexpected_token(Token::Scalar(value)))
.map(|s: i32| s.into())
} else {
Err(ParseError::UnexpectedToken(Token::Scalar(value)))
Err(ParseError::unexpected_token(Token::Scalar(value)))
}
}
}
@ -329,20 +321,20 @@ mod impl_float_scalar {
pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<Float, String> {
v.as_float_value()
.ok_or_else(|| format!("Expected `Float`, found: {}", v))
.ok_or_else(|| format!("Expected `Float`, found: {v}"))
}
pub(super) fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<'_, S> {
pub(super) fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<S> {
match value {
ScalarToken::Int(v) => v
.parse()
.map_err(|_| ParseError::UnexpectedToken(Token::Scalar(value)))
.map_err(|_| ParseError::unexpected_token(Token::Scalar(value)))
.map(|s: i32| f64::from(s).into()),
ScalarToken::Float(v) => v
.parse()
.map_err(|_| ParseError::UnexpectedToken(Token::Scalar(value)))
.map_err(|_| ParseError::unexpected_token(Token::Scalar(value)))
.map(|s: f64| s.into()),
ScalarToken::String(_) => Err(ParseError::UnexpectedToken(Token::Scalar(value))),
ScalarToken::String(_) => Err(ParseError::unexpected_token(Token::Scalar(value))),
}
}
}
@ -401,8 +393,9 @@ where
{
}
// Implemented manually to omit redundant `T: Default` trait bound, imposed by
// `#[derive(Default)]`.
impl<T> Default for EmptyMutation<T> {
#[inline]
fn default() -> Self {
Self::new()
}
@ -461,8 +454,9 @@ where
{
}
// Implemented manually to omit redundant `T: Default` trait bound, imposed by
// `#[derive(Default)]`.
impl<T> Default for EmptySubscription<T> {
#[inline]
fn default() -> Self {
Self::new()
}
@ -499,8 +493,8 @@ mod tests {
#[test]
fn test_id_display() {
let id = ID(String::from("foo"));
assert_eq!(format!("{}", id), "foo");
let id = ID("foo".into());
assert_eq!(id.to_string(), "foo");
}
#[test]
@ -508,7 +502,7 @@ mod tests {
fn parse_string(s: &str, expected: &str) {
let s =
<String as ParseScalarValue<DefaultScalarValue>>::from_str(ScalarToken::String(s));
assert!(s.is_ok(), "A parsing error occurred: {:?}", s);
assert!(s.is_ok(), "A parsing error occurred: {s:?}");
let s: Option<String> = s.unwrap().into();
assert!(s.is_some(), "No string returned");
assert_eq!(s.unwrap(), expected);
@ -527,7 +521,7 @@ mod tests {
#[test]
fn parse_f64_from_int() {
for (v, expected) in &[
for (v, expected) in [
("0", 0),
("128", 128),
("1601942400", 1601942400),
@ -538,14 +532,14 @@ mod tests {
assert!(n.is_ok(), "A parsing error occurred: {:?}", n.unwrap_err());
let n: Option<f64> = n.unwrap().into();
assert!(n.is_some(), "No f64 returned");
assert_eq!(n.unwrap(), f64::from(*expected));
assert!(n.is_some(), "No `f64` returned");
assert_eq!(n.unwrap(), f64::from(expected));
}
}
#[test]
fn parse_f64_from_float() {
for (v, expected) in &[
for (v, expected) in [
("0.", 0.),
("1.2", 1.2),
("1601942400.", 1601942400.),
@ -556,8 +550,8 @@ mod tests {
assert!(n.is_ok(), "A parsing error occurred: {:?}", n.unwrap_err());
let n: Option<f64> = n.unwrap().into();
assert!(n.is_some(), "No f64 returned");
assert_eq!(n.unwrap(), *expected);
assert!(n.is_some(), "No `f64` returned");
assert_eq!(n.unwrap(), expected);
}
}
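
The test tweaks just above stop iterating over `&[...]` slices (which yield references, hence the previous `*expected`) and iterate the arrays by value instead. A standalone sketch of the difference (not part of the patch):

fn main() {
    // Iterating a slice yields references, so the tuple fields end up
    // behind a reference and must be dereferenced.
    for (v, expected) in &[("0", 0), ("128", 128)] {
        assert_eq!(v.parse::<i32>().unwrap(), *expected);
    }

    // Arrays implement `IntoIterator` by value since Rust 1.53, so the
    // elements are moved out directly and no dereference is needed.
    for (v, expected) in [("0", 0), ("128", 128)] {
        assert_eq!(v.parse::<i32>().unwrap(), expected);
    }
}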

View file

@ -94,8 +94,8 @@ pub trait SubscriptionConnection<S>: futures::Stream<Item = ExecutionOutput<S>>
///
/// See trait methods for more detailed explanation on how this trait works.
///
/// [1]: https://spec.graphql.org/June2018/#sec-Subscription
/// [2]: https://spec.graphql.org/June2018/#sec-Objects
/// [1]: https://spec.graphql.org/October2021#sec-Subscription
/// [2]: https://spec.graphql.org/October2021#sec-Objects
pub trait GraphQLSubscriptionValue<S = DefaultScalarValue>: GraphQLValue<S> + Sync
where
Self::TypeInfo: Sync,
@ -204,7 +204,7 @@ crate::sa::assert_obj_safe!(GraphQLSubscriptionValue<Context = (), TypeInfo = ()
/// It's automatically implemented for [`GraphQLSubscriptionValue`] and [`GraphQLType`]
/// implementers, so doesn't require manual or code-generated implementation.
///
/// [1]: https://spec.graphql.org/June2018/#sec-Subscription
/// [1]: https://spec.graphql.org/October2021#sec-Subscription
pub trait GraphQLSubscriptionType<S = DefaultScalarValue>:
GraphQLSubscriptionValue<S> + GraphQLType<S>
where
@ -316,7 +316,9 @@ where
f.arguments.as_ref().map(|m| {
m.item
.iter()
.map(|&(ref k, ref v)| (k.item, v.item.clone().into_const(exec_vars)))
.filter_map(|&(ref k, ref v)| {
v.item.clone().into_const(exec_vars).map(|v| (k.item, v))
})
.collect()
}),
&meta_field.arguments,
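
The hunk above switches the argument collection from `map` to `filter_map`: `into_const(exec_vars)` evidently yields an `Option` now, and arguments whose value cannot be resolved are omitted from the collected map instead of being passed through. A self-contained sketch of the `filter_map` shape with made-up variable data (the names below are invented, not Juniper's API):

use std::collections::HashMap;

fn main() {
    let vars: HashMap<&str, i32> = HashMap::from([("limit", 10)]);

    // Hypothetical field arguments referring to variables by name.
    let args = [("first", "limit"), ("after", "cursor")];

    // `filter_map` keeps only the arguments whose variable is actually
    // provided, pairing each argument name with the resolved value.
    let resolved: HashMap<&str, i32> = args
        .iter()
        .filter_map(|&(name, var)| vars.get(var).map(|v| (name, *v)))
        .collect();

    assert_eq!(resolved.len(), 1);
    assert_eq!(resolved["first"], 10);
}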

View file

@ -25,6 +25,7 @@ where
}
}
TypeType::List(ref inner, expected_size) => match *arg_value {
InputValue::Null | InputValue::Variable(_) => true,
InputValue::List(ref items) => {
if let Some(expected) = expected_size {
if items.len() != expected {
@ -71,7 +72,7 @@ where
let mut remaining_required_fields = input_fields
.iter()
.filter_map(|f| {
if f.arg_type.is_non_null() {
if f.arg_type.is_non_null() && f.default_value.is_none() {
Some(&f.name)
} else {
None
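
Two behavioral changes above: an explicit `null` (or a variable) is now accepted as a literal in a nullable list position, and a non-null input field no longer counts as required when it has a default value. A simplified sketch of the second rule in isolation, with an invented `FieldMeta` type standing in for the real field metadata:

// Made-up, simplified stand-in for an input-object field's metadata.
struct FieldMeta {
    name: &'static str,
    non_null: bool,
    default_value: Option<&'static str>,
}

// A field must be provided only if it is non-null *and* has no default.
fn required_fields(fields: &[FieldMeta]) -> Vec<&'static str> {
    fields
        .iter()
        .filter(|f| f.non_null && f.default_value.is_none())
        .map(|f| f.name)
        .collect()
}

fn main() {
    let fields = [
        FieldMeta { name: "id", non_null: true, default_value: None },
        FieldMeta { name: "limit", non_null: true, default_value: Some("10") },
        FieldMeta { name: "after", non_null: false, default_value: None },
    ];
    assert_eq!(required_fields(&fields), vec!["id"]);
}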

View file

@ -30,9 +30,9 @@ pub struct ValidatorContext<'a, S: Debug + 'a> {
impl RuleError {
#[doc(hidden)]
pub fn new(message: &str, locations: &[SourcePosition]) -> RuleError {
RuleError {
message: message.to_owned(),
pub fn new(message: &str, locations: &[SourcePosition]) -> Self {
Self {
message: message.into(),
locations: locations.to_vec(),
}
}
@ -53,14 +53,15 @@ impl RuleError {
impl fmt::Display for RuleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// this is fine since all `RuleError`s should have at least one source position
// This is fine since all `RuleError`s should have at least one source
// position.
let locations = self
.locations
.iter()
.map(|location| format!("{}", location))
.map(ToString::to_string)
.collect::<Vec<_>>()
.join(", ");
write!(f, "{}. At {}", self.message, locations)
write!(f, "{}. At {locations}", self.message)
}
}
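
A small recurring substitution: a closure that merely forwards to `format!("{}", x)` is replaced with the `ToString::to_string` path, which is blanket-implemented for every `Display` type. A standalone sketch with an invented `Pos` type:

use std::fmt;

struct Pos(usize, usize);

impl fmt::Display for Pos {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}:{}", self.0, self.1)
    }
}

fn main() {
    let locations = [Pos(1, 2), Pos(3, 4)];

    // The closure `|loc| format!("{}", loc)` and the path below are
    // equivalent here; the path just names the method directly.
    let joined = locations
        .iter()
        .map(ToString::to_string)
        .collect::<Vec<_>>()
        .join(", ");

    assert_eq!(joined, "1:2, 3:4");
}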

View file

@ -99,7 +99,7 @@ where
var_name,
var_pos,
&path,
&format!(r#"Expected "{}", found null"#, meta_type),
format!(r#"Expected "{meta_type}", found null"#),
));
} else {
errors.append(&mut unify_value(
@ -121,10 +121,10 @@ where
var_name,
var_pos,
&path,
&format!(
"Expected list of {} elements, found {} elements",
expected,
l.len()
format!(
"Expected list of {expected} elements, \
found {} elements",
l.len(),
),
));
}
@ -168,11 +168,11 @@ where
var_name,
var_pos,
&path,
&format!(
"Expected input of type `{}`. Got: `{}`. \
format!(
"Expected input of type `{}`. \
Got: `{value}`. \
Details: {}",
iom.name,
value,
e.message(),
),
));
@ -205,10 +205,9 @@ where
var_name,
var_pos,
path,
&format!(
"Expected input scalar `{}`. Got: `{}`. Details: {}",
format!(
"Expected input scalar `{}`. Got: `{value}`. Details: {}",
meta.name,
value,
e.message(),
),
)];
@ -219,13 +218,13 @@ where
var_name,
var_pos,
path,
&format!(r#"Expected "{}", found list"#, meta.name),
format!(r#"Expected "{}", found list"#, meta.name),
)),
InputValue::Object(_) => errors.push(unification_error(
var_name,
var_pos,
path,
&format!(r#"Expected "{}", found object"#, meta.name),
format!(r#"Expected "{}", found object"#, meta.name),
)),
_ => (),
}
@ -244,27 +243,27 @@ where
{
let mut errors: Vec<RuleError> = vec![];
match *value {
match value {
// TODO: avoid this bad duplicate as_str() call. (value system refactor)
InputValue::Scalar(ref scalar) if scalar.as_str().is_some() => {
InputValue::Scalar(scalar) if scalar.as_str().is_some() => {
if let Some(name) = scalar.as_str() {
if !meta.values.iter().any(|ev| ev.name == *name) {
errors.push(unification_error(
var_name,
var_pos,
path,
&format!(r#"Invalid value for enum "{}""#, meta.name),
format!(r#"Invalid value for enum "{}""#, meta.name),
))
}
}
}
InputValue::Enum(ref name) => {
InputValue::Enum(name) => {
if !meta.values.iter().any(|ev| &ev.name == name) {
errors.push(unification_error(
var_name,
var_pos,
path,
&format!(r#"Invalid value for enum "{}""#, meta.name),
format!(r#"Invalid value for enum "{}""#, meta.name),
))
}
}
@ -272,7 +271,7 @@ where
var_name,
var_pos,
path,
&format!(r#"Expected "{}", found not a string or enum"#, meta.name),
format!(r#"Expected "{}", found not a string or enum"#, meta.name),
)),
}
errors
@ -318,7 +317,7 @@ where
var_name,
var_pos,
&Path::ObjectField(&input_field.name, path),
&format!(r#"Expected "{}", found null"#, input_field.arg_type),
format!(r#"Expected "{}", found null"#, input_field.arg_type),
));
}
}
@ -336,7 +335,7 @@ where
var_name,
var_pos,
path,
&format!(r#"Expected "{}", found not an object"#, meta.name),
format!(r#"Expected "{}", found not an object"#, meta.name),
));
}
errors
@ -349,17 +348,14 @@ where
v.map_or(true, InputValue::is_null)
}
fn unification_error<'a>(
var_name: &str,
fn unification_error(
var_name: impl fmt::Display,
var_pos: &SourcePosition,
path: &Path<'a>,
message: &str,
path: &Path<'_>,
message: impl fmt::Display,
) -> RuleError {
RuleError::new(
&format!(
r#"Variable "${}" got invalid value. {}{}."#,
var_name, path, message,
),
&format!(r#"Variable "${var_name}" got invalid value. {path}{message}."#),
&[*var_pos],
)
}
@ -368,8 +364,8 @@ impl<'a> fmt::Display for Path<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Path::Root => write!(f, ""),
Path::ArrayElement(idx, prev) => write!(f, "{}In element #{}: ", prev, idx),
Path::ObjectField(name, prev) => write!(f, r#"{}In field "{}": "#, prev, name),
Path::ArrayElement(idx, prev) => write!(f, "{prev}In element #{idx}: "),
Path::ObjectField(name, prev) => write!(f, r#"{prev}In field "{name}": "#),
}
}
}
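
`unification_error` above switches its `&str` parameters to `impl fmt::Display`, so callers no longer have to pre-render their arguments with `format!` and pass references; a single `format!` inside builds the message. A simplified, self-contained sketch of the idea (the reduced signature below is made up for illustration):

use std::fmt;

// Accepting `impl Display` lets callers pass `&str`, `String`, numbers, or
// any other printable type without formatting it first.
fn unification_error(var_name: impl fmt::Display, message: impl fmt::Display) -> String {
    format!(r#"Variable "${var_name}" got invalid value. {message}."#)
}

fn main() {
    // A `&str` and a `String` both work, as does any `Display` type
    // (here, an integer).
    let a = unification_error("input", "Expected \"Int\", found null");
    let b = unification_error("first".to_string(), 42);

    assert_eq!(a, r#"Variable "$input" got invalid value. Expected "Int", found null."#);
    assert_eq!(b, r#"Variable "$first" got invalid value. 42."#);
}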

View file

@ -1,3 +1,5 @@
use std::fmt;
use crate::{
ast::{Directive, Field, InputValue},
parser::Spanning,
@ -6,13 +8,12 @@ use crate::{
validation::{ValidatorContext, Visitor},
value::ScalarValue,
};
use std::fmt::Debug;
pub struct ArgumentsOfCorrectType<'a, S: Debug + 'a> {
pub struct ArgumentsOfCorrectType<'a, S: fmt::Debug + 'a> {
current_args: Option<&'a Vec<Argument<'a, S>>>,
}
pub fn factory<'a, S: Debug>() -> ArgumentsOfCorrectType<'a, S> {
pub fn factory<'a, S: fmt::Debug>() -> ArgumentsOfCorrectType<'a, S> {
ArgumentsOfCorrectType { current_args: None }
}
@ -59,7 +60,7 @@ where
if !is_valid_literal_value(ctx.schema, &meta_type, &arg_value.item) {
ctx.report_error(
&error_message(arg_name.item, &format!("{}", argument_meta.arg_type)),
&error_message(arg_name.item, &argument_meta.arg_type),
&[arg_value.start],
);
}
@ -67,11 +68,8 @@ where
}
}
fn error_message(arg_name: &str, type_name: &str) -> String {
format!(
"Invalid value for argument \"{}\", expected type \"{}\"",
arg_name, type_name
)
fn error_message(arg_name: impl fmt::Display, type_name: impl fmt::Display) -> String {
format!("Invalid value for argument \"{arg_name}\", expected type \"{type_name}\"",)
}
#[cfg(test)]
@ -85,7 +83,7 @@ mod tests {
};
#[test]
fn good_null_value() {
fn null_into_nullable_int() {
expect_passes_rule::<_, _, DefaultScalarValue>(
factory,
r#"
@ -98,6 +96,20 @@ mod tests {
);
}
#[test]
fn null_into_nullable_list() {
expect_passes_rule::<_, _, DefaultScalarValue>(
factory,
r#"
{
complicatedArgs {
stringListArgField(stringListArg: null)
}
}
"#,
);
}
#[test]
fn null_into_int() {
expect_fails_rule::<_, _, DefaultScalarValue>(
@ -116,6 +128,24 @@ mod tests {
);
}
#[test]
fn null_into_list() {
expect_fails_rule::<_, _, DefaultScalarValue>(
factory,
r#"
{
complicatedArgs {
nonNullStringListArgField(nonNullStringListArg: null)
}
}
"#,
&[RuleError::new(
&error_message("nonNullStringListArg", "[String!]!"),
&[SourcePosition::new(111, 3, 64)],
)],
);
}
#[test]
fn good_int_value() {
expect_passes_rule::<_, _, DefaultScalarValue>(

View file

@ -1,3 +1,5 @@
use std::fmt;
use crate::{
ast::VariableDefinition,
parser::Spanning,
@ -29,7 +31,7 @@ where
{
if var_def.var_type.item.is_non_null() {
ctx.report_error(
&non_null_error_message(var_name.item, &format!("{}", var_def.var_type.item)),
&non_null_error_message(var_name.item, &var_def.var_type.item),
&[*start],
)
} else {
@ -37,7 +39,7 @@ where
if !is_valid_literal_value(ctx.schema, &meta_type, var_value) {
ctx.report_error(
&type_error_message(var_name.item, &format!("{}", var_def.var_type.item)),
&type_error_message(var_name.item, &var_def.var_type.item),
&[*start],
);
}
@ -46,17 +48,14 @@ where
}
}
fn type_error_message(arg_name: &str, type_name: &str) -> String {
format!(
"Invalid default value for argument \"{}\", expected type \"{}\"",
arg_name, type_name
)
fn type_error_message(arg_name: impl fmt::Display, type_name: impl fmt::Display) -> String {
format!("Invalid default value for argument \"{arg_name}\", expected type \"{type_name}\"")
}
fn non_null_error_message(arg_name: &str, type_name: &str) -> String {
fn non_null_error_message(arg_name: impl fmt::Display, type_name: impl fmt::Display) -> String {
format!(
"Argument \"{}\" has type \"{}\" and is not nullable, so it can't have a default value",
arg_name, type_name
"Argument \"{arg_name}\" has type \"{type_name}\" and is not nullable, \
so it can't have a default value",
)
}

View file

@ -69,7 +69,7 @@ where
}
fn error_message(field: &str, type_name: &str) -> String {
format!(r#"Unknown field "{}" on type "{}""#, field, type_name)
format!(r#"Unknown field "{field}" on type "{type_name}""#)
}
#[cfg(test)]

View file

@ -59,15 +59,9 @@ where
fn error_message(fragment_name: Option<&str>, on_type: &str) -> String {
if let Some(name) = fragment_name {
format!(
r#"Fragment "{}" cannot condition non composite type "{}"#,
name, on_type
)
format!(r#"Fragment "{name}" cannot condition non composite type "{on_type}"#)
} else {
format!(
r#"Fragment cannot condition on non composite type "{}""#,
on_type
)
format!(r#"Fragment cannot condition on non composite type "{on_type}""#)
}
}

View file

@ -91,17 +91,11 @@ where
}
fn field_error_message(arg_name: &str, field_name: &str, type_name: &str) -> String {
format!(
r#"Unknown argument "{}" on field "{}" of type "{}""#,
arg_name, field_name, type_name
)
format!(r#"Unknown argument "{arg_name}" on field "{field_name}" of type "{type_name}""#)
}
fn directive_error_message(arg_name: &str, directive_name: &str) -> String {
format!(
r#"Unknown argument "{}" on directive "{}""#,
arg_name, directive_name
)
format!(r#"Unknown argument "{arg_name}" on directive "{directive_name}""#)
}
#[cfg(test)]

View file

@ -154,14 +154,11 @@ where
}
fn unknown_error_message(directive_name: &str) -> String {
format!(r#"Unknown directive "{}""#, directive_name)
format!(r#"Unknown directive "{directive_name}""#)
}
fn misplaced_error_message(directive_name: &str, location: &DirectiveLocation) -> String {
format!(
r#"Directive "{}" may not be used on {}"#,
directive_name, location
)
format!(r#"Directive "{directive_name}" may not be used on {location}"#)
}
#[cfg(test)]

View file

@ -28,7 +28,7 @@ where
}
fn error_message(frag_name: &str) -> String {
format!(r#"Unknown fragment: "{}""#, frag_name)
format!(r#"Unknown fragment: "{frag_name}""#)
}
#[cfg(test)]

View file

@ -56,7 +56,7 @@ fn validate_type<'a, S: Debug>(
}
fn error_message(type_name: &str) -> String {
format!(r#"Unknown type "{}""#, type_name)
format!(r#"Unknown type "{type_name}""#)
}
#[cfg(test)]

View file

@ -7,19 +7,6 @@ use crate::{
value::ScalarValue,
};
pub struct NoFragmentCycles<'a> {
current_fragment: Option<&'a str>,
spreads: HashMap<&'a str, Vec<Spanning<&'a str>>>,
fragment_order: Vec<&'a str>,
}
struct CycleDetector<'a> {
visited: HashSet<&'a str>,
spreads: &'a HashMap<&'a str, Vec<Spanning<&'a str>>>,
path_indices: HashMap<&'a str, usize>,
errors: Vec<RuleError>,
}
pub fn factory<'a>() -> NoFragmentCycles<'a> {
NoFragmentCycles {
current_fragment: None,
@ -28,6 +15,12 @@ pub fn factory<'a>() -> NoFragmentCycles<'a> {
}
}
pub struct NoFragmentCycles<'a> {
current_fragment: Option<&'a str>,
spreads: HashMap<&'a str, Vec<Spanning<&'a str>>>,
fragment_order: Vec<&'a str>,
}
impl<'a, S> Visitor<'a, S> for NoFragmentCycles<'a>
where
S: ScalarValue,
@ -38,14 +31,12 @@ where
let mut detector = CycleDetector {
visited: HashSet::new(),
spreads: &self.spreads,
path_indices: HashMap::new(),
errors: Vec::new(),
};
for frag in &self.fragment_order {
if !detector.visited.contains(frag) {
let mut path = Vec::new();
detector.detect_from(frag, &mut path);
detector.detect_from(frag);
}
}
@ -91,19 +82,46 @@ where
}
}
type CycleDetectorState<'a> = (&'a str, Vec<&'a Spanning<&'a str>>, HashMap<&'a str, usize>);
struct CycleDetector<'a> {
visited: HashSet<&'a str>,
spreads: &'a HashMap<&'a str, Vec<Spanning<&'a str>>>,
errors: Vec<RuleError>,
}
impl<'a> CycleDetector<'a> {
fn detect_from(&mut self, from: &'a str, path: &mut Vec<&'a Spanning<&'a str>>) {
fn detect_from(&mut self, from: &'a str) {
let mut to_visit = Vec::new();
to_visit.push((from, Vec::new(), HashMap::new()));
while let Some((from, path, path_indices)) = to_visit.pop() {
to_visit.extend(self.detect_from_inner(from, path, path_indices));
}
}
/// This function should only be called from inside [`Self::detect_from()`]:
/// it replaces that function's recursion with an explicit worklist kept on
/// the heap instead of on the call stack. Instead of recursing, it returns a
/// [`Vec`] of states that [`Self::detect_from()`] keeps visiting.
fn detect_from_inner(
&mut self,
from: &'a str,
path: Vec<&'a Spanning<&'a str>>,
mut path_indices: HashMap<&'a str, usize>,
) -> Vec<CycleDetectorState<'a>> {
self.visited.insert(from);
if !self.spreads.contains_key(from) {
return;
return Vec::new();
}
self.path_indices.insert(from, path.len());
path_indices.insert(from, path.len());
let mut to_visit = Vec::new();
for node in &self.spreads[from] {
let name = &node.item;
let index = self.path_indices.get(name).cloned();
let name = node.item;
let index = path_indices.get(name).cloned();
if let Some(index) = index {
let err_pos = if index < path.len() {
@ -114,19 +132,19 @@ impl<'a> CycleDetector<'a> {
self.errors
.push(RuleError::new(&error_message(name), &[err_pos.start]));
} else if !self.visited.contains(name) {
} else {
let mut path = path.clone();
path.push(node);
self.detect_from(name, path);
path.pop();
to_visit.push((name, path, path_indices.clone()));
}
}
self.path_indices.remove(from);
to_visit
}
}
fn error_message(frag_name: &str) -> String {
format!(r#"Cannot spread fragment "{}""#, frag_name)
format!(r#"Cannot spread fragment "{frag_name}""#)
}
#[cfg(test)]
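
The `CycleDetector` rewrite above (and the analogous ones in the next two files) replaces recursion with an explicit worklist `Vec`, so deeply nested fragment spreads grow a heap-allocated queue rather than the call stack. A standalone sketch of the pattern on a plain string graph (the graph and names are invented):

use std::collections::{HashMap, HashSet};

// Depth-first reachability without recursion: the pending nodes live in a
// heap-allocated `Vec` instead of stack frames, so a very deep spread chain
// cannot overflow the call stack.
fn reachable<'a>(
    from: &'a str,
    edges: &HashMap<&'a str, Vec<&'a str>>,
) -> HashSet<&'a str> {
    let mut visited = HashSet::new();
    let mut to_visit = vec![from];

    while let Some(node) = to_visit.pop() {
        if !visited.insert(node) {
            continue; // already handled, like the `visited` check above
        }
        if let Some(next) = edges.get(node) {
            to_visit.extend(next.iter().copied());
        }
    }
    visited
}

fn main() {
    let edges = HashMap::from([
        ("queryFragment", vec!["userFields", "postFields"]),
        ("userFields", vec!["postFields"]),
    ]);
    let seen = reachable("queryFragment", &edges);
    assert_eq!(seen.len(), 3);
}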

View file

@ -12,13 +12,6 @@ pub enum Scope<'a> {
Fragment(&'a str),
}
pub struct NoUndefinedVariables<'a> {
defined_variables: HashMap<Option<&'a str>, (SourcePosition, HashSet<&'a str>)>,
used_variables: HashMap<Scope<'a>, Vec<Spanning<&'a str>>>,
current_scope: Option<Scope<'a>>,
spreads: HashMap<Scope<'a>, Vec<&'a str>>,
}
pub fn factory<'a>() -> NoUndefinedVariables<'a> {
NoUndefinedVariables {
defined_variables: HashMap::new(),
@ -28,6 +21,13 @@ pub fn factory<'a>() -> NoUndefinedVariables<'a> {
}
}
pub struct NoUndefinedVariables<'a> {
defined_variables: HashMap<Option<&'a str>, (SourcePosition, HashSet<&'a str>)>,
used_variables: HashMap<Scope<'a>, Vec<Spanning<&'a str>>>,
current_scope: Option<Scope<'a>>,
spreads: HashMap<Scope<'a>, Vec<&'a str>>,
}
impl<'a> NoUndefinedVariables<'a> {
fn find_undef_vars(
&'a self,
@ -36,8 +36,34 @@ impl<'a> NoUndefinedVariables<'a> {
unused: &mut Vec<&'a Spanning<&'a str>>,
visited: &mut HashSet<Scope<'a>>,
) {
let mut to_visit = Vec::new();
if let Some(spreads) = self.find_undef_vars_inner(scope, defined, unused, visited) {
to_visit.push(spreads);
}
while let Some(spreads) = to_visit.pop() {
for spread in spreads {
if let Some(spreads) =
self.find_undef_vars_inner(&Scope::Fragment(spread), defined, unused, visited)
{
to_visit.push(spreads);
}
}
}
}
/// This function should only be called from inside [`Self::find_undef_vars()`]:
/// it replaces that function's recursion with an explicit worklist kept on
/// the heap instead of on the call stack. Instead of recursing, it returns
/// the [`Vec`] of spreads that [`Self::find_undef_vars()`] keeps visiting.
fn find_undef_vars_inner(
&'a self,
scope: &Scope<'a>,
defined: &HashSet<&'a str>,
unused: &mut Vec<&'a Spanning<&'a str>>,
visited: &mut HashSet<Scope<'a>>,
) -> Option<&'a Vec<&'a str>> {
if visited.contains(scope) {
return;
return None;
}
visited.insert(scope.clone());
@ -50,11 +76,7 @@ impl<'a> NoUndefinedVariables<'a> {
}
}
if let Some(spreads) = self.spreads.get(scope) {
for spread in spreads {
self.find_undef_vars(&Scope::Fragment(spread), defined, unused, visited);
}
}
self.spreads.get(scope)
}
}
@ -151,12 +173,9 @@ where
fn error_message(var_name: &str, op_name: Option<&str>) -> String {
if let Some(op_name) = op_name {
format!(
r#"Variable "${}" is not defined by operation "{}""#,
var_name, op_name
)
format!(r#"Variable "${var_name}" is not defined by operation "{op_name}""#)
} else {
format!(r#"Variable "${}" is not defined"#, var_name)
format!(r#"Variable "${var_name}" is not defined"#)
}
}

View file

@ -7,18 +7,12 @@ use crate::{
value::ScalarValue,
};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Scope<'a> {
Operation(Option<&'a str>),
Fragment(&'a str),
}
pub struct NoUnusedFragments<'a> {
spreads: HashMap<Scope<'a>, Vec<&'a str>>,
defined_fragments: HashSet<Spanning<&'a str>>,
current_scope: Option<Scope<'a>>,
}
pub fn factory<'a>() -> NoUnusedFragments<'a> {
NoUnusedFragments {
spreads: HashMap::new(),
@ -27,21 +21,42 @@ pub fn factory<'a>() -> NoUnusedFragments<'a> {
}
}
pub struct NoUnusedFragments<'a> {
spreads: HashMap<Scope<'a>, Vec<&'a str>>,
defined_fragments: HashSet<Spanning<&'a str>>,
current_scope: Option<Scope<'a>>,
}
impl<'a> NoUnusedFragments<'a> {
fn find_reachable_fragments(&self, from: &Scope<'a>, result: &mut HashSet<&'a str>) {
if let Scope::Fragment(name) = *from {
fn find_reachable_fragments(&'a self, from: Scope<'a>, result: &mut HashSet<&'a str>) {
let mut to_visit = Vec::new();
to_visit.push(from);
while let Some(from) = to_visit.pop() {
if let Some(next) = self.find_reachable_fragments_inner(from, result) {
to_visit.extend(next.iter().map(|s| Scope::Fragment(s)));
}
}
}
/// This function should only be called from inside
/// [`Self::find_reachable_fragments()`]: it replaces that function's
/// recursion with an explicit worklist kept on the heap instead of on the
/// call stack. Instead of recursing, it returns the [`Vec`] of spreads that
/// [`Self::find_reachable_fragments()`] keeps visiting.
fn find_reachable_fragments_inner(
&'a self,
from: Scope<'a>,
result: &mut HashSet<&'a str>,
) -> Option<&'a Vec<&'a str>> {
if let Scope::Fragment(name) = from {
if result.contains(name) {
return;
return None;
} else {
result.insert(name);
}
}
if let Some(spreads) = self.spreads.get(from) {
for spread in spreads {
self.find_reachable_fragments(&Scope::Fragment(spread), result)
}
}
self.spreads.get(&from)
}
}
@ -59,7 +74,7 @@ where
}) = *def
{
let op_name = name.as_ref().map(|s| s.item);
self.find_reachable_fragments(&Scope::Operation(op_name), &mut reachable);
self.find_reachable_fragments(Scope::Operation(op_name), &mut reachable);
}
}
@ -96,7 +111,7 @@ where
) {
if let Some(ref scope) = self.current_scope {
self.spreads
.entry(scope.clone())
.entry(*scope)
.or_insert_with(Vec::new)
.push(spread.item.name.item);
}
@ -104,7 +119,7 @@ where
}
fn error_message(frag_name: &str) -> String {
format!(r#"Fragment "{}" is never used"#, frag_name)
format!(r#"Fragment "{frag_name}" is never used"#)
}
#[cfg(test)]
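
Besides the worklist rewrite, `Scope<'a>` above now derives `Copy`, which is why `.entry(scope.clone())` became `.entry(*scope)`. A minimal sketch of why an enum holding only `Copy` data can be copied instead of cloned (the map contents are invented):

use std::collections::HashMap;

// An enum whose variants only hold `Copy` data (here, `&str` and
// `Option<&str>`) can itself be `Copy`, so no explicit `.clone()` is needed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum Scope<'a> {
    Operation(Option<&'a str>),
    Fragment(&'a str),
}

fn main() {
    let current = Scope::Fragment("userFields");
    let mut spreads: HashMap<Scope<'_>, Vec<&str>> = HashMap::new();

    // Because `Scope` is `Copy`, passing it by value just copies the bits;
    // with only `Clone`, a `current.clone()` call would still be required.
    spreads.entry(current).or_insert_with(Vec::new).push("postFields");

    assert_eq!(spreads[&current], vec!["postFields"]);
}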

Some files were not shown because too many files have changed in this diff.