diff --git a/.github/workflows/elixir.yml b/.github/workflows/elixir.yml
index c79946a104..7eefc16d13 100644
--- a/.github/workflows/elixir.yml
+++ b/.github/workflows/elixir.yml
@@ -9,24 +9,19 @@ on:
jobs:
test:
name: Elixir ${{matrix.elixir}} / OTP ${{matrix.otp}}
- runs-on: ubuntu-latest
+ runs-on: ubuntu-20.04
strategy:
matrix:
elixir:
- - "1.10"
- - "1.11"
- "1.12"
- "1.13"
+ - "1.14"
otp:
- - "22"
- "23"
- "24"
- exclude:
- - elixir: "1.10"
- otp: "24"
include:
- - elixir: "1.13"
+ - elixir: "1.14"
otp: "24"
format: true
@@ -35,7 +30,7 @@ jobs:
uses: actions/checkout@v2
- name: Set up Elixir
- uses: erlef/setup-elixir@v1
+ uses: erlef/setup-beam@v1
with:
elixir-version: ${{ matrix.elixir }}
otp-version: ${{ matrix.otp }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ad5e14a799..c20ce375f5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,19 +4,30 @@
- Bug Fix: [Validate type references for invalid wrapped types](https://github.com/absinthe-graphql/absinthe/pull/1195)
- Feature: [Add `specifiedBy` type system directive](https://github.com/absinthe-graphql/absinthe/pull/1193)
+- Bug Fix: [Object type extensions may be empty](https://github.com/absinthe-graphql/absinthe/pull/1228)
+- Bug Fix: [Validate input object not being an Enum](https://github.com/absinthe-graphql/absinthe/pull/1231)
+- Bug Fix: [Deduplicate directives when building schema](https://github.com/absinthe-graphql/absinthe/pull/1242)
+
+## 1.7.1
- Breaking Bugfix: [Validate repeatable directives on schemas](https://github.com/absinthe-graphql/absinthe/pull/1179)
+- Breaking Bugfix: [Add "Objects must define fields" schema validation](https://github.com/absinthe-graphql/absinthe/pull/1167)
+- Bug Fix: [Validate field identifier uniqueness](https://github.com/absinthe-graphql/absinthe/pull/1200)
+- Bug Fix: [Validate type references for invalid wrapped types](https://github.com/absinthe-graphql/absinthe/pull/1195)
- Bug Fix: Adds **optional fix** for non-compliant built-in scalar Int type. `use Absinthe.Schema, use_spec_compliant_int_scalar: true` in your schema to use the fixed Int type. It is also advisable to upgrade custom types if you are leveraging integers outside the GraphQL standard. [#1131](https://github.com/absinthe-graphql/absinthe/pull/1131).
- Feature: [Support error tuples when scalar parsing fails](https://github.com/absinthe-graphql/absinthe/pull/1187)
- Feature: [Convert SDL Language.\* structs to SDL notation](https://github.com/absinthe-graphql/absinthe/pull/1160)
+- Feature: [Support passing the resolution struct to dataloader helper callbacks](https://github.com/absinthe-graphql/absinthe/pull/1211)
- Feature: [Add support for type extensions](https://github.com/absinthe-graphql/absinthe/pull/1157)
- Bug Fix: [Add type system directives to introspection results](https://github.com/absinthe-graphql/absinthe/pull/1189)
- Bug Fix: [Add `__private__` field to EnumValueDefinition](https://github.com/absinthe-graphql/absinthe/pull/1148)
- Bug Fix: [Fix bug in Schema.**absinthe_types**(:all) for Persistent Term](https://github.com/absinthe-graphql/absinthe/pull/1161)
- Bug Fix: [Fix default enum value check for SDL schema's](https://github.com/absinthe-graphql/absinthe/pull/1188)
-- Bug Fix: [Add "Objects must define fields" schema validation](https://github.com/absinthe-graphql/absinthe/pull/1167)
- Feature: [Add `import_directives` macro](https://github.com/absinthe-graphql/absinthe/pull/1158)
- Feature: [Support type extensions on schema declarations](https://github.com/absinthe-graphql/absinthe/pull/1176)
- Bug Fix: [Root objects are marked as referenced correctly](https://github.com/absinthe-graphql/absinthe/pull/1186)
+- Bug Fix: [Prevent DDOS attacks with long queries](https://github.com/absinthe-graphql/absinthe/pull/1220)
+- Feature: [pipeline_modifier option to Absinthe.run/3](https://github.com/absinthe-graphql/absinthe/pull/1221)
+- Bug Fix: [Add end_time_mono to telemetry :stop events](https://github.com/absinthe-graphql/absinthe/pull/1174)
+
## 1.7.0
diff --git a/guides/complexity-analysis.md b/guides/complexity-analysis.md
index 8fddec67c0..96713007ad 100644
--- a/guides/complexity-analysis.md
+++ b/guides/complexity-analysis.md
@@ -1,7 +1,19 @@
-# Complexity Analysis
+# Safety Limits
A misbehaving client might send a very complex GraphQL query that would require
-considerable resources to handle. In order to protect against this scenario, the
+considerable resources to handle. There are two variations of this problem:
+
+- Complex queries that overwhelm resolution resources.
+- Extremely long queries that could take considerable resources to parse.
+  (For example, an attacker could craft a long query including thousands of
+  undefined fields or directives.)
+
+Either of these could be a vector for a denial-of-service attack in any GraphQL
+service. Absinthe includes mechanisms to protect services against each of these.
+
+## Complexity Analysis
+
+To protect against queries that could overwhelm available resources, the
complexity of a query can be estimated before it is resolved and limited to a
specified maximum.
@@ -105,3 +117,11 @@ But this, at a complexity of `60`, wouldn't:
If a document's calculated complexity exceeds the configured limit, resolution
will be skipped and an error will be returned in the result detailing the
calculated and maximum complexities.
+
+## Token Limits
+
+To protect a service from long queries that could take considerable resources to parse,
+Absinthe offers the option to configure a maximum number of tokens allowed in a GraphQL
+request document. If the lexer encounters more tokens than this limit, it stops the parse
+phase and returns a phase error with the message `"Token limit exceeded"`. The limit is
+`:infinity` by default (no limit) and can be overridden by passing an integer as the
+`:token_limit` option to `Absinthe.run/3`.
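+
+For example, to enforce the limit for a single `Absinthe.run/3` call (the schema
+module name here is illustrative):
+
+```elixir
+Absinthe.run(document, MyAppWeb.Schema, token_limit: 10_000)
+```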
diff --git a/guides/introspection.md b/guides/introspection.md
index a61c935946..e4c59ee551 100644
--- a/guides/introspection.md
+++ b/guides/introspection.md
@@ -123,9 +123,3 @@ end
If you'd prefer to use a desktop application, we recommend using the pre-built
[Electron](https://electron.atom.io)-based wrapper application,
[GraphiQL.app](https://github.com/skevy/graphiql-app).
-
-### GraphQL Hub
-
-[GraphQL Hub](https://www.graphqlhub.com/) is an interesting website that you
-can use to introspect a number of public GraphQL servers, using GraphiQL in the
-browser and providing useful examples.
diff --git a/guides/subscriptions.md b/guides/subscriptions.md
index 9deeb22547..11030ed282 100644
--- a/guides/subscriptions.md
+++ b/guides/subscriptions.md
@@ -45,8 +45,17 @@ line:
Supervisor.start_link(children, opts)
```
+See `Absinthe.Subscription.child_spec/1` for more information on the supported
+options.
+
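+For example, to pass options explicitly (the pool size shown is illustrative):
+
+```elixir
+{Absinthe.Subscription, pubsub: MyAppWeb.Endpoint, pool_size: 8}
+```
+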
In your `MyAppWeb.Endpoint` module add:
+```elixir
+use Absinthe.Phoenix.Endpoint
+```
+
+Now, you need to configure your socket. For example, in your `MyAppWeb.UserSocket` module add:
+
```elixir
use Absinthe.Phoenix.Socket,
schema: MyAppWeb.Schema
diff --git a/guides/telemetry.md b/guides/telemetry.md
index 1a1b62bb4c..304691ef3e 100644
--- a/guides/telemetry.md
+++ b/guides/telemetry.md
@@ -62,3 +62,29 @@ After a query is executed, you'll see something like:
}
}
```
+
+## OpenTelemetry
+
+When using OpenTelemetry, one usually wants to correlate spans that are created
+in spawned tasks with the main trace. For example, you might have a trace started
+in a Phoenix endpoint, and then have spans around database access.
+
+One can correlate manually by attaching the OTel context inside the task function:
+
+```elixir
+ctx = OpenTelemetry.Ctx.get_current()
+
+Task.async(fn ->
+ OpenTelemetry.Ctx.attach(ctx)
+
+ # do stuff that might create spans
+end)
+```
+
+When using the `async` and `batch` middleware, the tasks are spawned by Absinthe,
+so you can't attach the context manually.
+
+Instead, you can add the `:opentelemetry_process_propagator` package to your
+dependencies, which has a `Task.async/1` wrapper that will attach the context
+automatically. If the package is installed, the middleware will use it in place
+of the default `Task.async/1`.
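+
+A sketch of the corresponding dependency entry in `mix.exs` (the version
+requirement mirrors the one this release declares; adjust as needed):
+
+```elixir
+defp deps do
+  [
+    {:opentelemetry_process_propagator, "~> 0.2.1"}
+  ]
+end
+```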
diff --git a/guides/tutorial/our-first-query.md b/guides/tutorial/our-first-query.md
index 8cf676d5b1..b51bb0f28b 100644
--- a/guides/tutorial/our-first-query.md
+++ b/guides/tutorial/our-first-query.md
@@ -156,7 +156,7 @@ Once it's up-and-running, take a look at [http://localhost:4000/api/graphiql](ht
Make sure that the `URL` is pointing to the correct place and press the play button. If everything goes according to plan, you should see something like this:
-
+
## Next Step
diff --git a/guides/tutorial/subscriptions.md b/guides/tutorial/subscriptions.md
index b591397992..f2a5dcfddc 100644
--- a/guides/tutorial/subscriptions.md
+++ b/guides/tutorial/subscriptions.md
@@ -33,7 +33,7 @@ In `lib/blog/application.ex`:
children = [
# other children ...
{BlogWeb.Endpoint, []}, # this line should already exist
- {Absinthe.Subscription, [BlogWeb.Endpoint]}, # add this line
+ {Absinthe.Subscription, BlogWeb.Endpoint}, # add this line
# other children ...
]
```
diff --git a/lib/absinthe.ex b/lib/absinthe.ex
index 122afb8bb0..5ed484cdc2 100644
--- a/lib/absinthe.ex
+++ b/lib/absinthe.ex
@@ -49,6 +49,8 @@ defmodule Absinthe do
| %{data: nil | result_selection_t, errors: [result_error_t]}
| %{errors: [result_error_t]}
+ @type pipeline_modifier_fun :: (Absinthe.Pipeline.t(), Keyword.t() -> Absinthe.Pipeline.t())
+
@doc """
Evaluates a query document against a schema, with options.
@@ -92,7 +94,8 @@ defmodule Absinthe do
operation_name: String.t(),
analyze_complexity: boolean,
variables: %{optional(String.t()) => any()},
- max_complexity: non_neg_integer | :infinity
+ max_complexity: non_neg_integer | :infinity,
+ pipeline_modifier: pipeline_modifier_fun()
]
@type run_result :: {:ok, result_t} | {:error, String.t()}
@@ -103,9 +106,12 @@ defmodule Absinthe do
run_opts
) :: run_result
def run(document, schema, options \\ []) do
+ pipeline_modifier = options[:pipeline_modifier] || (&pipeline_identity/2)
+
pipeline =
schema
|> Absinthe.Pipeline.for_document(options)
+ |> pipeline_modifier.(options)
case Absinthe.Pipeline.run(document, pipeline) do
{:ok, %{result: result}, _phases} ->
@@ -126,7 +132,7 @@ defmodule Absinthe do
@spec run!(
binary | Absinthe.Language.Source.t() | Absinthe.Language.Document.t(),
Absinthe.Schema.t(),
- Keyword.t()
+ run_opts
) :: result_t | no_return
def run!(input, schema, options \\ []) do
case run(input, schema, options) do
@@ -134,4 +140,6 @@ defmodule Absinthe do
{:error, err} -> raise ExecutionError, message: err
end
end
+
+ defp pipeline_identity(pipeline, _options), do: pipeline
end
diff --git a/lib/absinthe/lexer.ex b/lib/absinthe/lexer.ex
index a481e53c12..c637ff352e 100644
--- a/lib/absinthe/lexer.ex
+++ b/lib/absinthe/lexer.ex
@@ -227,13 +227,19 @@ defmodule Absinthe.Lexer do
{:cont, context}
end
- @spec tokenize(binary()) :: {:ok, [any()]} | {:error, binary(), {integer(), non_neg_integer()}}
- def tokenize(input) do
+ @spec tokenize(binary(), Keyword.t()) ::
+ {:ok, [any()]} | {:error, binary(), {integer(), non_neg_integer()}}
+ def tokenize(input, options \\ []) do
lines = String.split(input, ~r/\r?\n/)
- case do_tokenize(input) do
+ tokenize_opts = [context: %{token_limit: Keyword.get(options, :token_limit, :infinity)}]
+
+ case do_tokenize(input, tokenize_opts) do
+ {:error, :stopped_at_token_limit, _, _, _, _} ->
+ {:error, :exceeded_token_limit}
+
{:ok, tokens, "", _, _, _} ->
- tokens = Enum.map(tokens, &convert_token_column(&1, lines))
+ tokens = convert_token_columns_from_byte_to_char(tokens, lines)
{:ok, tokens}
{:ok, _, rest, _, {line, line_offset}, byte_offset} ->
@@ -242,12 +248,88 @@ defmodule Absinthe.Lexer do
end
end
- defp convert_token_column({ident, loc, data}, lines) do
- {ident, byte_loc_to_char_loc(loc, lines), data}
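+  # Walk the token stream once, keeping a cursor into the source lines so each
+  # byte-based column can be converted to a character column without rescanning
+  # a line from its beginning for every token.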
+ defp convert_token_columns_from_byte_to_char(tokens, [first_line | next_lines]) do
+ initial_cursor_state = %{
+ line_num_cursor: 1,
+ current_line_substring: first_line,
+ current_line_char_offset: 1,
+ current_line_byte_offset: 1,
+ next_lines: next_lines
+ }
+
+ Enum.map_reduce(tokens, initial_cursor_state, fn current_token, cursor_state ->
+ {token_line_num, token_byte_col} =
+ case current_token do
+ {_, {token_line_num, token_byte_col}, _} -> {token_line_num, token_byte_col}
+ {_, {token_line_num, token_byte_col}} -> {token_line_num, token_byte_col}
+ end
+
+ cursor_state = maybe_move_cursor_to_next_line(cursor_state, token_line_num)
+
+ adjusted_byte_col = token_byte_col - cursor_state.current_line_byte_offset
+
+ line_part_from_prev_to_current_token =
+ binary_part(cursor_state.current_line_substring, 0, adjusted_byte_col)
+
+ token_char_col =
+ String.length(line_part_from_prev_to_current_token) +
+ cursor_state.current_line_char_offset
+
+ updated_line_substring =
+ binary_part(
+ cursor_state.current_line_substring,
+ adjusted_byte_col,
+ byte_size(cursor_state.current_line_substring) - adjusted_byte_col
+ )
+
+ next_cursor_state =
+ cursor_state
+ |> Map.put(:current_line_substring, updated_line_substring)
+ |> Map.put(:current_line_byte_offset, token_byte_col)
+ |> Map.put(:current_line_char_offset, token_char_col)
+
+ result =
+ case current_token do
+ {ident, _, data} -> {ident, {token_line_num, token_char_col}, data}
+ {ident, _} -> {ident, {token_line_num, token_char_col}}
+ end
+
+ {result, next_cursor_state}
+ end)
+ |> case do
+ {results, _} -> results
+ end
end
- defp convert_token_column({ident, loc}, lines) do
- {ident, byte_loc_to_char_loc(loc, lines)}
+ defp maybe_move_cursor_to_next_line(
+ %{line_num_cursor: line_num_cursor} = cursor_state,
+ token_line_num
+ )
+ when line_num_cursor == token_line_num,
+ do: cursor_state
+
+ defp maybe_move_cursor_to_next_line(
+ %{line_num_cursor: line_num_cursor} = cursor_state,
+ token_line_num
+ )
+ when line_num_cursor < token_line_num,
+ do: move_cursor_to_next_line(cursor_state, token_line_num)
+
+ defp move_cursor_to_next_line(
+ %{line_num_cursor: line_num_cursor, next_lines: next_lines} = _cursor_state,
+ token_line_num
+ ) do
+ {_completed, unprocessed_lines} = Enum.split(next_lines, token_line_num - line_num_cursor - 1)
+
+ [current_line | next_lines] = unprocessed_lines
+
+ %{
+ line_num_cursor: token_line_num,
+ current_line_substring: current_line,
+ current_line_char_offset: 1,
+ current_line_byte_offset: 1,
+ next_lines: next_lines
+ }
end
defp byte_loc_to_char_loc({line, byte_col}, lines) do
@@ -264,7 +346,6 @@ defmodule Absinthe.Lexer do
repeat(
choice([
ignore(ignored),
- comment,
punctuator,
block_string_value,
string_value,
@@ -310,7 +391,19 @@ defmodule Absinthe.Lexer do
union
) |> Enum.map(&String.to_charlist/1)
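+
+  # Each token-emitting helper below has a guard clause that aborts tokenization
+  # with `:stopped_at_token_limit` once the configured token limit is reached.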
+ defp boolean_value_or_name_or_reserved_word(
+ _,
+ _,
+ %{token_count: count, token_limit: limit} = _context,
+ _,
+ _
+ )
+ when count >= limit do
+ {:error, :stopped_at_token_limit}
+ end
+
defp boolean_value_or_name_or_reserved_word(rest, chars, context, loc, byte_offset) do
+ context = Map.update(context, :token_count, 1, &(&1 + 1))
value = chars |> Enum.reverse()
do_boolean_value_or_name_or_reserved_word(rest, value, context, loc, byte_offset)
end
@@ -330,7 +423,12 @@ defmodule Absinthe.Lexer do
{rest, [{:name, line_and_column(loc, byte_offset, length(value)), value}], context}
end
+ defp labeled_token(_, _, %{token_count: count, token_limit: limit} = _context, _, _, _)
+ when count >= limit,
+ do: {:error, :stopped_at_token_limit}
+
defp labeled_token(rest, chars, context, loc, byte_offset, token_name) do
+ context = Map.update(context, :token_count, 1, &(&1 + 1))
value = chars |> Enum.reverse()
{rest, [{token_name, line_and_column(loc, byte_offset, length(value)), value}], context}
end
@@ -343,21 +441,38 @@ defmodule Absinthe.Lexer do
{rest, [], Map.put(context, :token_location, line_and_column(loc, byte_offset, 3))}
end
+ defp block_string_value_token(_, _, %{token_count: count, token_limit: limit} = _context, _, _)
+ when count >= limit,
+ do: {:error, :stopped_at_token_limit}
+
defp block_string_value_token(rest, chars, context, _loc, _byte_offset) do
+ context = Map.update(context, :token_count, 1, &(&1 + 1))
value = '"""' ++ (chars |> Enum.reverse()) ++ '"""'
{rest, [{:block_string_value, context.token_location, value}],
Map.delete(context, :token_location)}
end
+ defp string_value_token(_, _, %{token_count: count, token_limit: limit} = _context, _, _)
+ when count >= limit,
+ do: {:error, :stopped_at_token_limit}
+
defp string_value_token(rest, chars, context, _loc, _byte_offset) do
+ context = Map.update(context, :token_count, 1, &(&1 + 1))
value = '"' ++ tl(chars |> Enum.reverse()) ++ '"'
{rest, [{:string_value, context.token_location, value}], Map.delete(context, :token_location)}
end
+ defp atom_token(_, _, %{token_count: count, token_limit: limit} = _context, _, _)
+ when count >= limit do
+ {:error, :stopped_at_token_limit}
+ end
+
defp atom_token(rest, chars, context, loc, byte_offset) do
+ context = Map.update(context, :token_count, 1, &(&1 + 1))
value = chars |> Enum.reverse()
token_atom = value |> List.to_atom()
+
{rest, [{token_atom, line_and_column(loc, byte_offset, length(value))}], context}
end
diff --git a/lib/absinthe/middleware/async.ex b/lib/absinthe/middleware/async.ex
index b56c95d57c..c121af0481 100644
--- a/lib/absinthe/middleware/async.ex
+++ b/lib/absinthe/middleware/async.ex
@@ -52,7 +52,7 @@ defmodule Absinthe.Middleware.Async do
# task so we have actual data. Thus, we prepend this module to the middleware stack.
def call(%{state: :unresolved} = res, {fun, opts}) when is_function(fun) do
task =
- Task.async(fn ->
+ async(fn ->
:telemetry.span([:absinthe, :middleware, :async, :task], %{}, fn -> {fun.(), %{}} end)
end)
@@ -110,4 +110,13 @@ defmodule Absinthe.Middleware.Async do
pipeline
end
end
+
+ # Optionally use `async/1` function from `opentelemetry_process_propagator` if available
+ if Code.ensure_loaded?(OpentelemetryProcessPropagator.Task) do
+ @spec async((() -> any)) :: Task.t()
+ defdelegate async(fun), to: OpentelemetryProcessPropagator.Task
+ else
+ @spec async((() -> any)) :: Task.t()
+ defdelegate async(fun), to: Task
+ end
end
diff --git a/lib/absinthe/middleware/batch.ex b/lib/absinthe/middleware/batch.ex
index e6334feafc..4ab60719b1 100644
--- a/lib/absinthe/middleware/batch.ex
+++ b/lib/absinthe/middleware/batch.ex
@@ -137,7 +137,7 @@ defmodule Absinthe.Middleware.Batch do
start_time_mono = System.monotonic_time()
task =
- Task.async(fn ->
+ async(fn ->
{batch_fun, call_batch_fun(batch_fun, batch_data)}
end)
@@ -206,4 +206,13 @@ defmodule Absinthe.Middleware.Batch do
pipeline
end
end
+
+ # Optionally use `async/1` function from `opentelemetry_process_propagator` if available
+ if Code.ensure_loaded?(OpentelemetryProcessPropagator.Task) do
+ @spec async((() -> any)) :: Task.t()
+ defdelegate async(fun), to: OpentelemetryProcessPropagator.Task
+ else
+ @spec async((() -> any)) :: Task.t()
+ defdelegate async(fun), to: Task
+ end
end
diff --git a/lib/absinthe/phase/document/arguments/parse.ex b/lib/absinthe/phase/document/arguments/parse.ex
index e163d05000..880a432d6e 100644
--- a/lib/absinthe/phase/document/arguments/parse.ex
+++ b/lib/absinthe/phase/document/arguments/parse.ex
@@ -97,7 +97,7 @@ defmodule Absinthe.Phase.Document.Arguments.Parse do
end
defp build_value(%{__struct__: struct}, %Type.InputObject{}, _)
- when struct in [Input.Boolean, Input.Float, Input.Integer, Input.String] do
+ when struct in [Input.Boolean, Input.Float, Input.Integer, Input.String, Input.Enum] do
{:error, :bad_parse}
end
diff --git a/lib/absinthe/phase/document/execution/resolution.ex b/lib/absinthe/phase/document/execution/resolution.ex
index bde57714c7..67df033ebd 100644
--- a/lib/absinthe/phase/document/execution/resolution.ex
+++ b/lib/absinthe/phase/document/execution/resolution.ex
@@ -111,10 +111,13 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do
# walk list results
defp walk_results([value | values], bp_node, inner_type, res, [i | sub_path] = path, acc) do
- {result, res} = walk_result(value, bp_node, inner_type, res, path)
+ {result, res} = walk_result(value, bp_node, inner_type, %{res | path: path}, path)
walk_results(values, bp_node, inner_type, res, [i + 1 | sub_path], [result | acc])
end
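+
+  # Once every list element has been walked, pop the index off the path so
+  # `res.path` points at the parent field again.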
+ defp walk_results([], _, _, res = %{path: [_ | sub_path]}, _, acc),
+ do: {:lists.reverse(acc), %{res | path: sub_path}}
+
defp walk_results([], _, _, res, _, acc), do: {:lists.reverse(acc), res}
defp resolve_fields(parent, res, source, path) do
@@ -139,7 +142,8 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do
res = %{res | fields_cache: fields_cache}
- do_resolve_fields(fields, res, source, parent_type, path, [])
+ {values, res} = do_resolve_fields(fields, res, source, parent_type, path, [])
+ {values, %{res | path: path}}
end
end
@@ -281,6 +285,15 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do
|> propagate_null_trimming
end
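+
+  # A non-null list containing `nil` entries yields one error per `nil` element,
+  # with the element's index recorded in the error path.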
+ defp maybe_add_non_null_error([], values, %Type.NonNull{of_type: %Type.List{}}) do
+ values
+ |> Enum.with_index()
+ |> Enum.filter(&is_nil(elem(&1, 0)))
+ |> Enum.map(fn {_value, index} ->
+ %{message: "Cannot return null for non-nullable field", path: [index]}
+ end)
+ end
+
defp maybe_add_non_null_error([], nil, %Type.NonNull{}) do
["Cannot return null for non-nullable field"]
end
@@ -310,11 +323,7 @@ defmodule Absinthe.Phase.Document.Execution.Resolution do
nil
|> to_result(bp_field, full_type, node.extensions)
- |> Map.put(:errors, bad_child.errors)
-
- # ^ We don't have to worry about clobbering the current node's errors because,
- # if it had any errors, it wouldn't have any children and we wouldn't be
- # here anyway.
+ |> Map.put(:errors, node.errors ++ bad_child.errors)
else
node
end
diff --git a/lib/absinthe/phase/document/validation/known_directives.ex b/lib/absinthe/phase/document/validation/known_directives.ex
index 2a84b7f406..32c9a9a36c 100644
--- a/lib/absinthe/phase/document/validation/known_directives.ex
+++ b/lib/absinthe/phase/document/validation/known_directives.ex
@@ -4,7 +4,6 @@ defmodule Absinthe.Phase.Document.Validation.KnownDirectives do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/known_fragment_names.ex b/lib/absinthe/phase/document/validation/known_fragment_names.ex
index 4e8dbdbfc8..877fb98f63 100644
--- a/lib/absinthe/phase/document/validation/known_fragment_names.ex
+++ b/lib/absinthe/phase/document/validation/known_fragment_names.ex
@@ -7,7 +7,6 @@ defmodule Absinthe.Phase.Document.Validation.KnownFragmentNames do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/lone_anonymous_operation.ex b/lib/absinthe/phase/document/validation/lone_anonymous_operation.ex
index 857bf4a238..652152bdb1 100644
--- a/lib/absinthe/phase/document/validation/lone_anonymous_operation.ex
+++ b/lib/absinthe/phase/document/validation/lone_anonymous_operation.ex
@@ -7,7 +7,6 @@ defmodule Absinthe.Phase.Document.Validation.LoneAnonymousOperation do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/no_undefined_variables.ex b/lib/absinthe/phase/document/validation/no_undefined_variables.ex
index 87a8c8e1d8..582871a372 100644
--- a/lib/absinthe/phase/document/validation/no_undefined_variables.ex
+++ b/lib/absinthe/phase/document/validation/no_undefined_variables.ex
@@ -7,7 +7,6 @@ defmodule Absinthe.Phase.Document.Validation.NoUndefinedVariables do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/no_unused_fragments.ex b/lib/absinthe/phase/document/validation/no_unused_fragments.ex
index b5a3a2a89e..e2688bf31d 100644
--- a/lib/absinthe/phase/document/validation/no_unused_fragments.ex
+++ b/lib/absinthe/phase/document/validation/no_unused_fragments.ex
@@ -6,7 +6,6 @@ defmodule Absinthe.Phase.Document.Validation.NoUnusedFragments do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/no_unused_variables.ex b/lib/absinthe/phase/document/validation/no_unused_variables.ex
index aaacf2b04b..16788ff434 100644
--- a/lib/absinthe/phase/document/validation/no_unused_variables.ex
+++ b/lib/absinthe/phase/document/validation/no_unused_variables.ex
@@ -7,7 +7,6 @@ defmodule Absinthe.Phase.Document.Validation.NoUnusedVariables do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/only_one_subscription.ex b/lib/absinthe/phase/document/validation/only_one_subscription.ex
index 9a74fb705e..493a541a98 100644
--- a/lib/absinthe/phase/document/validation/only_one_subscription.ex
+++ b/lib/absinthe/phase/document/validation/only_one_subscription.ex
@@ -7,7 +7,6 @@ defmodule Absinthe.Phase.Document.Validation.OnlyOneSubscription do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/provided_an_operation.ex b/lib/absinthe/phase/document/validation/provided_an_operation.ex
index 26c1bd2ffc..869acb86ea 100644
--- a/lib/absinthe/phase/document/validation/provided_an_operation.ex
+++ b/lib/absinthe/phase/document/validation/provided_an_operation.ex
@@ -6,7 +6,6 @@ defmodule Absinthe.Phase.Document.Validation.ProvidedAnOperation do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/provided_non_null_variables.ex b/lib/absinthe/phase/document/validation/provided_non_null_variables.ex
index efe69c4bc7..286ddb910a 100644
--- a/lib/absinthe/phase/document/validation/provided_non_null_variables.ex
+++ b/lib/absinthe/phase/document/validation/provided_non_null_variables.ex
@@ -7,7 +7,6 @@ defmodule Absinthe.Phase.Document.Validation.ProvidedNonNullVariables do
alias Absinthe.{Blueprint, Phase, Schema}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/repeatable_directives.ex b/lib/absinthe/phase/document/validation/repeatable_directives.ex
index d84288333e..4391942904 100644
--- a/lib/absinthe/phase/document/validation/repeatable_directives.ex
+++ b/lib/absinthe/phase/document/validation/repeatable_directives.ex
@@ -4,7 +4,6 @@ defmodule Absinthe.Phase.Document.Validation.RepeatableDirectives do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/scalar_leafs.ex b/lib/absinthe/phase/document/validation/scalar_leafs.ex
index 3843d21267..07d59db28a 100644
--- a/lib/absinthe/phase/document/validation/scalar_leafs.ex
+++ b/lib/absinthe/phase/document/validation/scalar_leafs.ex
@@ -38,7 +38,6 @@ defmodule Absinthe.Phase.Document.Validation.ScalarLeafs do
alias Absinthe.{Blueprint, Phase, Type}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/selected_current_operation.ex b/lib/absinthe/phase/document/validation/selected_current_operation.ex
index 29c83a03f2..89f63ea6c6 100644
--- a/lib/absinthe/phase/document/validation/selected_current_operation.ex
+++ b/lib/absinthe/phase/document/validation/selected_current_operation.ex
@@ -6,7 +6,6 @@ defmodule Absinthe.Phase.Document.Validation.SelectedCurrentOperation do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/unique_argument_names.ex b/lib/absinthe/phase/document/validation/unique_argument_names.ex
index a899b4ff25..ac836c8527 100644
--- a/lib/absinthe/phase/document/validation/unique_argument_names.ex
+++ b/lib/absinthe/phase/document/validation/unique_argument_names.ex
@@ -7,7 +7,6 @@ defmodule Absinthe.Phase.Document.Validation.UniqueArgumentNames do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/unique_fragment_names.ex b/lib/absinthe/phase/document/validation/unique_fragment_names.ex
index 0ec756b3e5..0b706a4cc5 100644
--- a/lib/absinthe/phase/document/validation/unique_fragment_names.ex
+++ b/lib/absinthe/phase/document/validation/unique_fragment_names.ex
@@ -6,7 +6,6 @@ defmodule Absinthe.Phase.Document.Validation.UniqueFragmentNames do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/unique_input_field_names.ex b/lib/absinthe/phase/document/validation/unique_input_field_names.ex
index 758f0b2df7..9d6354f50a 100644
--- a/lib/absinthe/phase/document/validation/unique_input_field_names.ex
+++ b/lib/absinthe/phase/document/validation/unique_input_field_names.ex
@@ -6,7 +6,6 @@ defmodule Absinthe.Phase.Document.Validation.UniqueInputFieldNames do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/unique_operation_names.ex b/lib/absinthe/phase/document/validation/unique_operation_names.ex
index c3f45faafd..31b9de8e68 100644
--- a/lib/absinthe/phase/document/validation/unique_operation_names.ex
+++ b/lib/absinthe/phase/document/validation/unique_operation_names.ex
@@ -6,7 +6,6 @@ defmodule Absinthe.Phase.Document.Validation.UniqueOperationNames do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/unique_variable_names.ex b/lib/absinthe/phase/document/validation/unique_variable_names.ex
index c00064acec..79c9d47156 100644
--- a/lib/absinthe/phase/document/validation/unique_variable_names.ex
+++ b/lib/absinthe/phase/document/validation/unique_variable_names.ex
@@ -7,7 +7,6 @@ defmodule Absinthe.Phase.Document.Validation.UniqueVariableNames do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/document/validation/variables_are_input_types.ex b/lib/absinthe/phase/document/validation/variables_are_input_types.ex
index 66d97923c0..43e37dfa07 100644
--- a/lib/absinthe/phase/document/validation/variables_are_input_types.ex
+++ b/lib/absinthe/phase/document/validation/variables_are_input_types.ex
@@ -7,7 +7,6 @@ defmodule Absinthe.Phase.Document.Validation.VariablesAreInputTypes do
alias Absinthe.{Blueprint, Phase, Schema, Type}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/parse.ex b/lib/absinthe/phase/parse.ex
index 7b04faf130..680671a13e 100644
--- a/lib/absinthe/phase/parse.ex
+++ b/lib/absinthe/phase/parse.ex
@@ -12,16 +12,14 @@ defmodule Absinthe.Phase.Parse do
def run(input, options \\ [])
def run(%Absinthe.Blueprint{} = blueprint, options) do
- options = Map.new(options)
-
- case parse(blueprint.input) do
+ case parse(blueprint.input, options) do
{:ok, value} ->
{:ok, %{blueprint | input: value}}
{:error, error} ->
blueprint
|> add_validation_error(error)
- |> handle_error(options)
+ |> handle_error(Map.new(options))
end
end
@@ -44,12 +42,15 @@ defmodule Absinthe.Phase.Parse do
{:error, blueprint}
end
- @spec tokenize(binary) :: {:ok, [tuple]} | {:error, String.t()}
- def tokenize(input) do
- case Absinthe.Lexer.tokenize(input) do
+ @spec tokenize(binary, Keyword.t()) :: {:ok, [tuple]} | {:error, String.t()}
+ def tokenize(input, options \\ []) do
+ case Absinthe.Lexer.tokenize(input, options) do
{:error, rest, loc} ->
{:error, format_raw_parse_error({:lexer, rest, loc})}
+ {:error, :exceeded_token_limit} ->
+ {:error, %Phase.Error{message: "Token limit exceeded", phase: __MODULE__}}
+
other ->
other
end
@@ -57,15 +58,16 @@ defmodule Absinthe.Phase.Parse do
# This is because Dialyzer is telling us tokenizing can never fail,
# but we know it's possible.
- @dialyzer {:no_match, parse: 1}
- @spec parse(binary | Language.Source.t()) :: {:ok, Language.Document.t()} | {:error, tuple}
- defp parse(input) when is_binary(input) do
- parse(%Language.Source{body: input})
+ @dialyzer {:no_match, parse: 2}
+  @spec parse(binary | Language.Source.t(), Keyword.t()) ::
+ {:ok, Language.Document.t()} | {:error, tuple}
+ defp parse(input, options) when is_binary(input) do
+ parse(%Language.Source{body: input}, options)
end
- defp parse(input) do
+ defp parse(input, options) do
try do
- case tokenize(input.body) do
+ case tokenize(input.body, options) do
{:ok, []} ->
{:ok, %Language.Document{}}
diff --git a/lib/absinthe/phase/schema/validation.ex b/lib/absinthe/phase/schema/validation.ex
deleted file mode 100644
index 053b25e670..0000000000
--- a/lib/absinthe/phase/schema/validation.ex
+++ /dev/null
@@ -1,11 +0,0 @@
-defmodule Absinthe.Phase.Schema.Validation do
- @moduledoc false
-
- alias Absinthe.Phase
-
- def pipeline do
- [
- Phase.Validation.KnownDirectives
- ]
- end
-end
diff --git a/lib/absinthe/phase/schema/validation/known_directives.ex b/lib/absinthe/phase/schema/validation/known_directives.ex
index 23a0bb2d5a..7b80669b08 100644
--- a/lib/absinthe/phase/schema/validation/known_directives.ex
+++ b/lib/absinthe/phase/schema/validation/known_directives.ex
@@ -4,7 +4,6 @@ defmodule Absinthe.Phase.Schema.Validation.KnownDirectives do
alias Absinthe.{Blueprint, Phase}
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/phase/schema/validation/object_must_define_fields.ex b/lib/absinthe/phase/schema/validation/object_must_define_fields.ex
index c1e6899bd6..1871bc3e3d 100644
--- a/lib/absinthe/phase/schema/validation/object_must_define_fields.ex
+++ b/lib/absinthe/phase/schema/validation/object_must_define_fields.ex
@@ -18,6 +18,10 @@ defmodule Absinthe.Phase.Schema.Validation.ObjectMustDefineFields do
obj
end
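+
+  # Type extensions may legitimately be empty (PR #1228), so halt the walk here
+  # rather than applying the "objects must define fields" rule to them.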
+ defp validate_objects(%Blueprint.Schema.TypeExtensionDefinition{} = node) do
+ {:halt, node}
+ end
+
defp validate_objects(%struct{} = node)
when struct in [
Blueprint.Schema.ObjectTypeDefinition,
diff --git a/lib/absinthe/phase/schema/validation/unique_field_names.ex b/lib/absinthe/phase/schema/validation/unique_field_names.ex
index 40a9774520..9a2aa61c2e 100644
--- a/lib/absinthe/phase/schema/validation/unique_field_names.ex
+++ b/lib/absinthe/phase/schema/validation/unique_field_names.ex
@@ -8,6 +8,7 @@ defmodule Absinthe.Phase.Schema.Validation.UniqueFieldNames do
bp =
bp
|> Blueprint.prewalk(&handle_schemas(&1, :name))
+ |> Blueprint.prewalk(&handle_schemas(&1, :identifier))
{:ok, bp}
end
@@ -32,7 +33,7 @@ defmodule Absinthe.Phase.Schema.Validation.UniqueFieldNames do
name_counts = Enum.frequencies_by(object.fields, &Map.get(&1, key))
if duplicate?(name_counts, field, key) do
- Absinthe.Phase.put_error(field, error(field, object))
+ Absinthe.Phase.put_error(field, error(field, object, key))
else
field
end
@@ -50,20 +51,28 @@ defmodule Absinthe.Phase.Schema.Validation.UniqueFieldNames do
Map.get(name_counts, field_identifier, 0) > 1
end
- defp error(field, object) do
+ defp error(field, object, key) do
%Absinthe.Phase.Error{
- message: explanation(field, object),
+ message: explanation(field, object, key),
locations: [field.__reference__.location],
phase: __MODULE__,
extra: field
}
end
- def explanation(field, object) do
+ def explanation(field, object, :name) do
"""
The field #{inspect(field.name)} is not unique in type #{inspect(object.name)}.
The field must have a unique name within that Object type; no two fields may share the same name.
"""
end
+
+ def explanation(field, object, :identifier) do
+ """
+ The field identifier #{inspect(field.identifier)} is not unique in type #{inspect(object.name)}.
+
+ The field must have a unique identifier within that Object type; no two fields may share the same identifier.
+ """
+ end
end
diff --git a/lib/absinthe/phase/validation.ex b/lib/absinthe/phase/validation.ex
deleted file mode 100644
index 0ee7629cb2..0000000000
--- a/lib/absinthe/phase/validation.ex
+++ /dev/null
@@ -1,26 +0,0 @@
-defmodule Absinthe.Phase.Validation do
- @moduledoc false
-
- alias Absinthe.Blueprint
-
- defmacro __using__(_) do
- quote do
- import unquote(__MODULE__).Helpers
- end
- end
-
- defmodule Helpers do
- @moduledoc false
-
- @spec any_invalid?([Blueprint.node_t()]) :: boolean
- def any_invalid?(nodes) do
- Enum.any?(nodes, fn
- %{flags: %{invalid: _}} ->
- true
-
- _ ->
- false
- end)
- end
- end
-end
diff --git a/lib/absinthe/phase/validation/known_type_names.ex b/lib/absinthe/phase/validation/known_type_names.ex
index 7e6efd9b6b..bd46972826 100644
--- a/lib/absinthe/phase/validation/known_type_names.ex
+++ b/lib/absinthe/phase/validation/known_type_names.ex
@@ -15,7 +15,6 @@ defmodule Absinthe.Phase.Validation.KnownTypeNames do
alias Absinthe.Phase.Document.Validation.Utils
use Absinthe.Phase
- use Absinthe.Phase.Validation
@doc """
Run the validation.
diff --git a/lib/absinthe/resolution/helpers.ex b/lib/absinthe/resolution/helpers.ex
index 94a42e19de..1746b27c94 100644
--- a/lib/absinthe/resolution/helpers.ex
+++ b/lib/absinthe/resolution/helpers.ex
@@ -132,7 +132,9 @@ defmodule Absinthe.Resolution.Helpers do
@type dataloader_opt ::
{:args, map}
| {:use_parent, true | false}
- | {:callback, (map(), map(), map() -> any())}
+ | {:callback,
+ (map(), map(), map() -> any())
+ | (map(), map(), map(), Absinthe.Resolution.t() -> any())}
@doc """
Resolve a field with a dataloader source.
@@ -200,7 +202,7 @@ defmodule Absinthe.Resolution.Helpers do
def dataloader(source, opts) when is_list(opts) do
fn parent, args, %{context: %{loader: loader}} = res ->
resource = res.definition.schema_node.identifier
- do_dataloader(loader, source, {resource, args}, parent, opts)
+ do_dataloader(loader, source, {resource, args}, parent, res, opts)
end
end
@@ -263,6 +265,19 @@ defmodule Absinthe.Resolution.Helpers do
`20`. By passing a callback function to `dataloader/2` we can ensure that
the value will fall nicely between 0 and 20.
+ If you have defined a
+ [custom batch query](https://hexdocs.pm/dataloader/Dataloader.Ecto.html#module-custom-batch-queries),
+  you can instead return a map with a `:batch` key to specify the `batch_key` of `Dataloader.load/4` /
+  `Dataloader.get/4`, and an `:item` key to specify the `item_key` argument of `Dataloader.get/4`.
+
+ ```
+ # ...
+ resolve dataloader(Posts, fn user, _args, _info ->
+ %{batch: {{:one, Post}, %{}}, item: [post_count: user]}
+ end)
+ # ...
+ ```
+
## Options
- `:args` default: `%{}`. Any arguments you want to always pass into the
@@ -270,7 +285,8 @@ defmodule Absinthe.Resolution.Helpers do
in the event of a conflict, the resolver arguments win.
- `:callback` default: return result wrapped in ok or error tuple.
Callback that is run with result of dataloader. It receives the result as
- the first argument, and the parent and args as second and third. Can be used
+    the first argument, and the parent and args as the second and third. It can
+    optionally receive the resolution struct as the fourth argument. Can be used
to e.g. compute fields on the return value of the loader. Should return an
ok or error tuple.
- `:use_parent` default: `false`. This option affects whether or not the `dataloader/2`
@@ -310,13 +326,13 @@ defmodule Absinthe.Resolution.Helpers do
%{batch: batch, item: item} -> {batch, item}
end
- do_dataloader(loader, source, batch_key, parent, opts)
+ do_dataloader(loader, source, batch_key, parent, res, opts)
end
end
def dataloader(source, resource, opts) do
- fn parent, args, %{context: %{loader: loader}} ->
- do_dataloader(loader, source, {resource, args}, parent, opts)
+ fn parent, args, %{context: %{loader: loader}} = res ->
+ do_dataloader(loader, source, {resource, args}, parent, res, opts)
end
end
@@ -337,7 +353,7 @@ defmodule Absinthe.Resolution.Helpers do
defp use_parent(loader, _source, _batch_key, _parent, _opts), do: loader
- defp do_dataloader(loader, source, batch_key, parent, opts) do
+ defp do_dataloader(loader, source, batch_key, parent, res, opts) do
args_from_opts = Keyword.get(opts, :args, %{})
{batch_key, args} =
@@ -357,9 +373,19 @@ defmodule Absinthe.Resolution.Helpers do
|> on_load(fn loader ->
callback = Keyword.get(opts, :callback, default_callback(loader))
- loader
- |> Dataloader.get(source, batch_key, parent)
- |> callback.(parent, args)
+ item = Dataloader.get(loader, source, batch_key, parent)
+
+ case callback do
+ callback when is_function(callback, 3) ->
+ callback.(item, parent, args)
+
+ callback when is_function(callback, 4) ->
+ callback.(item, parent, args, res)
+
+ callback ->
+ raise ArgumentError,
+ "Callback must be a function with arity either 3 or 4, got: #{inspect(callback)}"
+ end
end)
end
diff --git a/lib/absinthe/schema/notation.ex b/lib/absinthe/schema/notation.ex
index 717e34b580..3d5b2b08a7 100644
--- a/lib/absinthe/schema/notation.ex
+++ b/lib/absinthe/schema/notation.ex
@@ -14,12 +14,17 @@ defmodule Absinthe.Schema.Notation do
object :item do
field :id, :id
field :name, :string
+ field :status, :status_enum
+ end
+
+ enum :status_enum do
+ value :current
+ value :discontinued
end
# ...
end
-
"""
Module.register_attribute(__MODULE__, :placement, accumulate: true)
@@ -1477,7 +1482,8 @@ defmodule Absinthe.Schema.Notation do
@doc """
Import types defined using the Schema Definition Language (SDL).
- TODO: Explain handlers
+ To add resolvers and middleware to the schema, use the callbacks defined in
+ `Absinthe.Schema`, like `c:Absinthe.Schema.hydrate/2`.
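+
+  For example, a minimal hydration sketch (the field identifier and resolver
+  names are illustrative):
+
+      def hydrate(%Absinthe.Blueprint.Schema.FieldDefinition{identifier: :posts}, _ancestors) do
+        {:resolve, &MyApp.Resolvers.list_posts/3}
+      end
+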
## Placement
@@ -2404,7 +2410,8 @@ defmodule Absinthe.Schema.Notation do
end
defp expand_ast(ast, env) do
- Macro.prewalk(ast, fn
+ ast
+ |> Macro.prewalk(fn
# We don't want to expand `@bla` into `Module.get_attribute(module, @bla)` because this
# function call will fail if the module is already compiled. Remember that the ast gets put
# into a generated `__absinthe_blueprint__` function which is called at "__after_compile__"
@@ -2423,6 +2430,22 @@ defmodule Absinthe.Schema.Notation do
node ->
node
end)
+ |> expand_ast_map()
+ end
+
+ # Handle maps in AST format if they are not escaped in macros
+ defp expand_ast_map({:%{}, _, map_key_values} = _node) when is_list(map_key_values) do
+ map_key_values
+ |> Enum.map(fn {key, val} -> {key, expand_ast_map(val)} end)
+ |> Enum.into(%{})
+ end
+
+ defp expand_ast_map(node) when is_list(node) do
+ Enum.map(node, &expand_ast_map/1)
+ end
+
+ defp expand_ast_map(node) do
+ node
end
@doc false
diff --git a/lib/absinthe/schema/prototype/notation.ex b/lib/absinthe/schema/prototype/notation.ex
index b55de37338..68dce53c2e 100644
--- a/lib/absinthe/schema/prototype/notation.ex
+++ b/lib/absinthe/schema/prototype/notation.ex
@@ -49,6 +49,7 @@ defmodule Absinthe.Schema.Prototype.Notation do
pipeline
|> Absinthe.Pipeline.without(Absinthe.Phase.Schema.Validation.QueryTypeMustBeObject)
|> Absinthe.Pipeline.without(Absinthe.Phase.Schema.ImportPrototypeDirectives)
+ |> Absinthe.Pipeline.without(Absinthe.Phase.Schema.DirectiveImports)
|> Absinthe.Pipeline.replace(
Absinthe.Phase.Schema.TypeExtensionImports,
{Absinthe.Phase.Schema.TypeExtensionImports, []}
diff --git a/lib/absinthe/subscription.ex b/lib/absinthe/subscription.ex
index 1a83cf300f..d35c4f4b28 100644
--- a/lib/absinthe/subscription.ex
+++ b/lib/absinthe/subscription.ex
@@ -35,12 +35,43 @@ defmodule Absinthe.Subscription do
@doc """
Add Absinthe.Subscription to your process tree.
"""
- defdelegate start_link(pubsub), to: Subscription.Supervisor
+ @spec start_link(atom() | [opt()]) :: Supervisor.on_start()
+ defdelegate start_link(opts_or_pubsub), to: Subscription.Supervisor
- def child_spec(pubsub) do
+ @type opt() ::
+ {:pubsub, atom()} | {:compress_registry?, boolean()} | {:pool_size, pos_integer()}
+
+ @doc """
+ Build a child specification for subscriptions.
+
+  In order to use subscriptions in your application, you must add
+ `Absinthe.Subscription` to your supervision tree after your endpoint.
+
+ See `guides/subscriptions.md` for more information on how to get up and
+ running with subscriptions.
+
+ ## Options
+
+ * `:pubsub` - (Required) The `Phoenix.Pubsub` that should be used to publish
+ subscriptions. Typically this will be your `Phoenix.Endpoint`.
+ * `:compress_registry?` - (Optional - default `true`) A boolean controlling
+    whether the Registry used to keep track of subscriptions should be
+    compressed.
+ * `:pool_size` - (Optional - default `System.schedulers() * 2`) An integer
+ specifying the number of `Absinthe.Subscription.Proxy` processes to start.
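+
+  A sketch of a child specification with explicit options (the values shown are
+  illustrative):
+
+      children = [
+        MyAppWeb.Endpoint,
+        {Absinthe.Subscription, pubsub: MyAppWeb.Endpoint, compress_registry?: false}
+      ]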
+ """
+ @spec child_spec(atom() | [opt()]) :: Supervisor.child_spec()
+ def child_spec(pubsub) when is_atom(pubsub) do
+ # child_spec/1 used to take a single argument - the pub-sub - so in order
+ # to maintain compatibility for existing users of the library we still
+ # accept this argument and transform it into a keyword list.
+ child_spec(pubsub: pubsub)
+ end
+
+ def child_spec(opts) when is_list(opts) do
%{
id: __MODULE__,
- start: {Subscription.Supervisor, :start_link, [pubsub]},
+ start: {Subscription.Supervisor, :start_link, [opts]},
type: :supervisor
}
end
diff --git a/lib/absinthe/subscription/supervisor.ex b/lib/absinthe/subscription/supervisor.ex
index c339ea8636..7b63ae4ab9 100644
--- a/lib/absinthe/subscription/supervisor.ex
+++ b/lib/absinthe/subscription/supervisor.ex
@@ -3,9 +3,17 @@ defmodule Absinthe.Subscription.Supervisor do
use Supervisor
- def start_link(pubsub, pool_size \\ System.schedulers_online() * 2) do
+ @spec start_link(atom() | [Absinthe.Subscription.opt()]) :: Supervisor.on_start()
+ def start_link(pubsub) when is_atom(pubsub) do
+ # start_link/1 used to take a single argument - the pub-sub - so in order
+ # to maintain compatibility for existing users of the library we still
+ # accept this argument and transform it into a keyword list.
+ start_link(pubsub: pubsub)
+ end
+
+ def start_link(opts) when is_list(opts) do
pubsub =
- case pubsub do
+ case Keyword.fetch!(opts, :pubsub) do
[module] when is_atom(module) ->
module
@@ -13,10 +21,13 @@ defmodule Absinthe.Subscription.Supervisor do
module
end
- Supervisor.start_link(__MODULE__, {pubsub, pool_size})
+ pool_size = Keyword.get(opts, :pool_size, System.schedulers_online() * 2)
+ compress_registry? = Keyword.get(opts, :compress_registry?, true)
+
+ Supervisor.start_link(__MODULE__, {pubsub, pool_size, compress_registry?})
end
- def init({pubsub, pool_size}) do
+ def init({pubsub, pool_size, compress_registry?}) do
registry_name = Absinthe.Subscription.registry_name(pubsub)
meta = [pool_size: pool_size]
@@ -27,7 +38,7 @@ defmodule Absinthe.Subscription.Supervisor do
name: registry_name,
partitions: System.schedulers_online(),
meta: meta,
- compressed: true
+ compressed: compress_registry?
]},
{Absinthe.Subscription.ProxySupervisor, [pubsub, registry_name, pool_size]}
]
diff --git a/lib/absinthe/type/argument.ex b/lib/absinthe/type/argument.ex
index 3244f2597a..2edafa5ff2 100644
--- a/lib/absinthe/type/argument.ex
+++ b/lib/absinthe/type/argument.ex
@@ -7,8 +7,6 @@ defmodule Absinthe.Type.Argument do
alias Absinthe.Type
- use Type.Fetch
-
@typedoc """
Argument configuration
diff --git a/lib/absinthe/type/built_ins/scalars/utils.ex b/lib/absinthe/type/built_ins/scalars/utils.ex
deleted file mode 100644
index b251b57f7b..0000000000
--- a/lib/absinthe/type/built_ins/scalars/utils.ex
+++ /dev/null
@@ -1,20 +0,0 @@
-defmodule Absinthe.Type.BuiltIns.Scalars.Utils do
- @moduledoc false
-
- # Parse, supporting pulling values out of AST nodes
- defmacro parse_with(node_types, coercion) do
- quote do
- fn
- %{value: value} = node ->
- if Enum.member?(unquote(node_types), node) do
- unquote(coercion).(value)
- else
- nil
- end
-
- other ->
- unquote(coercion).(other)
- end
- end
- end
-end
diff --git a/lib/absinthe/type/fetch.ex b/lib/absinthe/type/fetch.ex
deleted file mode 100644
index f4149a3ba8..0000000000
--- a/lib/absinthe/type/fetch.ex
+++ /dev/null
@@ -1,15 +0,0 @@
-defmodule Absinthe.Type.Fetch do
- @moduledoc false
-
- defmacro __using__(_) do
- quote do
- def fetch(container, key) do
- if Map.has_key?(container, key) do
- {:ok, container |> Map.get(key)}
- else
- :error
- end
- end
- end
- end
-end
diff --git a/lib/absinthe/type/field.ex b/lib/absinthe/type/field.ex
index 9bfca934a1..aac93cc6ef 100644
--- a/lib/absinthe/type/field.ex
+++ b/lib/absinthe/type/field.ex
@@ -12,14 +12,18 @@ defmodule Absinthe.Type.Field do
alias Absinthe.Type
alias Absinthe.Type.Deprecation
- use Type.Fetch
-
@typedoc """
A resolver function.
See the `Absinthe.Type.Field.t` explanation of `:resolve` for more information.
"""
- @type resolver_t :: (%{atom => any}, Absinthe.Resolution.t() -> result)
+ @type resolver_t ::
+ (Absinthe.Resolution.arguments(), Absinthe.Resolution.t() -> result)
+ | (Absinthe.Resolution.source(),
+ Absinthe.Resolution.arguments(),
+ Absinthe.Resolution.t() ->
+ result)
+ | {module(), atom()}
@typedoc """
The result of a resolver.
diff --git a/lib/absinthe/type/input_object.ex b/lib/absinthe/type/input_object.ex
index 64fa4b2bcb..eb9d42d35c 100644
--- a/lib/absinthe/type/input_object.ex
+++ b/lib/absinthe/type/input_object.ex
@@ -32,7 +32,6 @@ defmodule Absinthe.Type.InputObject do
"""
use Absinthe.Introspection.TypeKind, :input_object
- use Absinthe.Type.Fetch
alias Absinthe.Type
diff --git a/lib/absinthe/type/list.ex b/lib/absinthe/type/list.ex
index ccf9ff13da..ba7633f6ec 100644
--- a/lib/absinthe/type/list.ex
+++ b/lib/absinthe/type/list.ex
@@ -19,7 +19,6 @@ defmodule Absinthe.Type.List do
"""
use Absinthe.Introspection.TypeKind, :list
- use Absinthe.Type.Fetch
@typedoc "
A defined list type.
diff --git a/lib/absinthe/type/non_null.ex b/lib/absinthe/type/non_null.ex
index 587104ad9a..0b887ea6e0 100644
--- a/lib/absinthe/type/non_null.ex
+++ b/lib/absinthe/type/non_null.ex
@@ -26,7 +26,6 @@ defmodule Absinthe.Type.NonNull do
"""
use Absinthe.Introspection.TypeKind, :non_null
- use Absinthe.Type.Fetch
@typedoc """
A defined non-null type.
diff --git a/mix.exs b/mix.exs
index 8641e7d8e9..7b2361679f 100644
--- a/mix.exs
+++ b/mix.exs
@@ -2,7 +2,7 @@ defmodule Absinthe.Mixfile do
use Mix.Project
@source_url "https://github.com/absinthe-graphql/absinthe"
- @version "1.7.0"
+ @version "1.7.1"
def project do
[
@@ -75,6 +75,7 @@ defmodule Absinthe.Mixfile do
{:telemetry, "~> 1.0 or ~> 0.4"},
{:dataloader, "~> 1.0.0", optional: true},
{:decimal, "~> 1.0 or ~> 2.0", optional: true},
+ {:opentelemetry_process_propagator, "~> 0.2.1", optional: true},
{:ex_doc, "~> 0.22", only: :dev},
{:benchee, ">= 1.0.0", only: :dev},
{:dialyxir, "~> 1.1.0", only: [:dev, :test], runtime: false},
diff --git a/mix.lock b/mix.lock
index 3845cebc01..a956f2cf5c 100644
--- a/mix.lock
+++ b/mix.lock
@@ -14,5 +14,7 @@
"makeup_graphql": {:hex, :makeup_graphql, "0.1.2", "81e2939aab6d2b81d39ee5d9e13fae02599e9ca6e1152e0eeed737a98a5f96aa", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.1", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "3390ab04ba388d52a94bbe64ef62aa4d7923ceaffac43ec948f58f631440e8fb"},
"mix_test_watch": {:hex, :mix_test_watch, "1.0.2", "34900184cbbbc6b6ed616ed3a8ea9b791f9fd2088419352a6d3200525637f785", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "47ac558d8b06f684773972c6d04fcc15590abdb97aeb7666da19fcbfdc441a07"},
"nimble_parsec": {:hex, :nimble_parsec, "1.2.3", "244836e6e3f1200c7f30cb56733fd808744eca61fd182f731eac4af635cc6d0b", [:mix], [], "hexpm", "c8d789e39b9131acf7b99291e93dae60ab48ef14a7ee9d58c6964f59efb570b0"},
+ "opentelemetry_api": {:hex, :opentelemetry_api, "1.2.0", "454a35655b4c1924405ef1f3587f2c6f141bf73366b2c5e8a38dcc619b53eaa0", [:mix, :rebar3], [], "hexpm", "9e677c68243de0f70538798072e66e1fb1d4a2ca8888a6eb493c0a41e5480c35"},
+ "opentelemetry_process_propagator": {:hex, :opentelemetry_process_propagator, "0.2.1", "20ac37648faf7175cade16fda8d58e6f1ff1b7f2a50a8ef9d70a032c41aba315", [:mix, :rebar3], [{:opentelemetry_api, "~> 1.0", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}], "hexpm", "f317237e39636d4f6140afa5d419e85ed3dc9e9a57072e7cd442df42af7b8aac"},
"telemetry": {:hex, :telemetry, "0.4.3", "a06428a514bdbc63293cd9a6263aad00ddeb66f608163bdec7c8995784080818", [:rebar3], [], "hexpm", "eb72b8365ffda5bed68a620d1da88525e326cb82a75ee61354fc24b844768041"},
}
diff --git a/src/absinthe_parser.yrl b/src/absinthe_parser.yrl
index 4036603a3d..a07f9f0910 100644
--- a/src/absinthe_parser.yrl
+++ b/src/absinthe_parser.yrl
@@ -178,6 +178,7 @@ ValueConst -> ObjectValueConst : build_ast_node('ObjectValue', #{'fields' =>
EnumValue -> Name : extract_binary('$1').
+ListValueConst -> '[' ']' : [].
ListValueConst -> '[' ValuesConst ']' : '$2'.
ValuesConst -> ValueConst : ['$1'].
ValuesConst -> ValueConst ValuesConst : ['$1'|'$2'].
diff --git a/test/absinthe/integration/execution/token_limit_enforcement.exs b/test/absinthe/integration/execution/token_limit_enforcement.exs
new file mode 100644
index 0000000000..e3d8e9e02b
--- /dev/null
+++ b/test/absinthe/integration/execution/token_limit_enforcement.exs
@@ -0,0 +1,41 @@
+defmodule Elixir.Absinthe.Integration.Execution.TokenLimitEnforcement do
+ use Absinthe.Case, async: true
+
+ test "Token limit lexer enforcement is precise" do
+ query = """
+ {
+ __typename @a @b @c @d @e
+ }
+ """
+
+ assert {:ok, %{errors: [%{message: "Token limit exceeded"}]}} ==
+ Absinthe.run(query, Absinthe.Fixtures.Things.MacroSchema, token_limit: 12)
+
+ refute {:ok, %{errors: [%{message: "Token limit exceeded"}]}} ==
+ Absinthe.run(query, Absinthe.Fixtures.Things.MacroSchema, token_limit: 13)
+
+ query = """
+ {
+ test(arg1: false, arg2: ["hi \u1F600", "hello", null], arg3: 3.14) {
+ results {
+ id #it's a guid
+ name @fakedirective
+
+ ... on SomeType {
+ some\u0046ield #should expand to someField without an extra token
+ }
+ }
+ }
+ }
+ """
+
+ # token count 33 = 8 braces + 2 parens + 2 brackets + 2 string values + 1 null + 1 float + 1 bool +
+  # 3 colons + 1 ... + 1 on + 0 ignored comments + 1 @ + 1 directive + 9 names
+
+ assert {:ok, %{errors: [%{message: "Token limit exceeded"}]}} ==
+ Absinthe.run(query, Absinthe.Fixtures.Things.MacroSchema, token_limit: 32)
+
+ refute {:ok, %{errors: [%{message: "Token limit exceeded"}]}} ==
+ Absinthe.run(query, Absinthe.Fixtures.Things.MacroSchema, token_limit: 33)
+ end
+end
diff --git a/test/absinthe/lexer_test.exs b/test/absinthe/lexer_test.exs
index a56f4c0015..592399f6ba 100644
--- a/test/absinthe/lexer_test.exs
+++ b/test/absinthe/lexer_test.exs
@@ -94,4 +94,35 @@ defmodule Absinthe.LexerTest do
{:"}", {7, 1}}
]} == Absinthe.Lexer.tokenize(@query)
end
+
+ @tag timeout: 3_000
+ test "long query doesn't take too long" do
+ # This tests the performance of long queries. Before optimization work, this
+ # test took 16 seconds. After optimization it took 0.08 seconds. Setting
+ # a generous ExUnit timeout ensures there has not been a performance regression
+    # while hopefully avoiding test fragility.
+ many_directives = String.duplicate("@abc ", 10_000)
+ {:ok, _} = Absinthe.Lexer.tokenize("{ __typename #{many_directives} }")
+ end
+
+ test "document with tokens exceeding limit" do
+ query = too_long_query()
+
+ assert {:error, :exceeded_token_limit} ==
+ Absinthe.Lexer.tokenize(query, token_limit: 15_000)
+
+ refute {:error, :exceeded_token_limit} ==
+ Absinthe.Lexer.tokenize(query)
+ end
+
+ defp too_long_query do
+    Enum.map(1..10_000, &"test#{&1}")
+ |> deep_query()
+ end
+
+ defp deep_query([]), do: ""
+
+ defp deep_query([field | rest]) do
+ "{ #{field} #{deep_query(rest)} }"
+ end
end
diff --git a/test/absinthe/middleware/async_test.exs b/test/absinthe/middleware/async_test.exs
index 49e39a196e..e797c90838 100644
--- a/test/absinthe/middleware/async_test.exs
+++ b/test/absinthe/middleware/async_test.exs
@@ -48,6 +48,14 @@ defmodule Absinthe.Middleware.AsyncTest do
{:middleware, Elixir.Absinthe.Middleware.Async, task}
end
end
+
+ field :async_check_otel_ctx, :string do
+ resolve fn _, _, _ ->
+ async(fn ->
+ {:ok, OpenTelemetry.Ctx.get_value("stored_value", nil)}
+ end)
+ end
+ end
end
def cool_async(fun) do
@@ -125,4 +133,14 @@ defmodule Absinthe.Middleware.AsyncTest do
assert {:ok, %{data: %{"returnsNil" => nil}}} == Absinthe.run(doc, Schema)
end
+
+ test "propagates the OTel context" do
+ doc = """
+ {asyncCheckOtelCtx}
+ """
+
+ OpenTelemetry.Ctx.set_value("stored_value", "some_value")
+
+ assert {:ok, %{data: %{"asyncCheckOtelCtx" => "some_value"}}} == Absinthe.run(doc, Schema)
+ end
end
diff --git a/test/absinthe/middleware/batch_test.exs b/test/absinthe/middleware/batch_test.exs
index d099055649..11b9654ddb 100644
--- a/test/absinthe/middleware/batch_test.exs
+++ b/test/absinthe/middleware/batch_test.exs
@@ -52,11 +52,23 @@ defmodule Absinthe.Middleware.BatchTest do
end)
end
end
+
+ field :ctx, :string do
+ resolve fn _, _, _ ->
+ batch({__MODULE__, :otel_ctx}, nil, fn batch ->
+ {:ok, batch}
+ end)
+ end
+ end
end
def by_id(_, ids) do
Map.take(@organizations, ids)
end
+
+ def otel_ctx(_, _) do
+ OpenTelemetry.Ctx.get_value("stored_value", nil)
+ end
end
test "can resolve a field using the normal async helper" do
@@ -128,4 +140,14 @@ defmodule Absinthe.Middleware.BatchTest do
assert_receive {:telemetry_event, [:absinthe, :middleware, :batch, :stop], %{duration: _},
%{id: _, batch_fun: _, batch_opts: _, batch_data: _, result: _}}
end
+
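+ # Batch functions likewise run outside the calling process; the OTel context
+ # set before Absinthe.run must be visible inside the batch function.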
+ test "propagates the OTel context" do
+ doc = """
+ {ctx}
+ """
+
+ OpenTelemetry.Ctx.set_value("stored_value", "some_value")
+
+ assert {:ok, %{data: %{"ctx" => "some_value"}}} == Absinthe.run(doc, Schema)
+ end
end
diff --git a/test/absinthe/middleware/dataloader_test.exs b/test/absinthe/middleware/dataloader_test.exs
index 201cd1bfcf..0c82a7cdc8 100644
--- a/test/absinthe/middleware/dataloader_test.exs
+++ b/test/absinthe/middleware/dataloader_test.exs
@@ -77,6 +77,26 @@ defmodule Absinthe.Middleware.DataloaderTest do
field :bar_organization, :organization do
resolve dataloader(:test, :organization, args: %{pid: self()}, use_parent: true)
end
+
+ field :bar_organization_name, :string do
+ resolve dataloader(
+ :test,
+ :organization,
+ args: %{pid: self()},
+ callback: fn organization, _parent, _args ->
+ {:ok, organization.name}
+ end
+ )
+ end
+
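+ # The 4-arity callback variant also receives the resolution struct, exposing
+ # execution state (here `resolution.state`) to the callback.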
+ field :bar_organization_state, :string do
+ resolve dataloader(:test, :organization,
+ args: %{pid: self()},
+ callback: fn organization, _parent, _args, resolution ->
+ {:ok, "#{organization.name} - #{resolution.state}"}
+ end
+ )
+ end
end
query do
@@ -165,6 +185,40 @@ defmodule Absinthe.Middleware.DataloaderTest do
refute_receive(:loading)
end
+ test "can resolve fields using dataloader helper with callback" do
+ doc = """
+ {
+ users {
+ organizationName: barOrganizationName
+ organizationState: barOrganizationState
+ }
+ }
+ """
+
+ expected_data = %{
+ "users" => [
+ %{
+ "organizationName" => "Organization: #1",
+ "organizationState" => "Organization: #1 - unresolved"
+ },
+ %{
+ "organizationName" => "Organization: #2",
+ "organizationState" => "Organization: #2 - unresolved"
+ },
+ %{
+ "organizationName" => "Organization: #3",
+ "organizationState" => "Organization: #3 - unresolved"
+ }
+ ]
+ }
+
+ assert {:ok, %{data: data}} = Absinthe.run(doc, DefaultSchema)
+ assert expected_data == data
+
+ assert_receive(:loading)
+ refute_receive(:loading)
+ end
+
test "can resolve a field when dataloader uses 'tuples' get_policy" do
doc = """
{
diff --git a/test/absinthe/phase/document/validation/arguments_of_correct_type_test.exs b/test/absinthe/phase/document/validation/arguments_of_correct_type_test.exs
index 4eb2b69867..64a30cdd80 100644
--- a/test/absinthe/phase/document/validation/arguments_of_correct_type_test.exs
+++ b/test/absinthe/phase/document/validation/arguments_of_correct_type_test.exs
@@ -873,6 +873,48 @@ defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectTypeTest do
end
describe "Invalid input object value" do
+ test "Not an input object, an unquoted string" do
+ assert_fails_validation(
+ """
+ {
+ complicatedArgs {
+ complexArgField(complexArg: SIT)
+ }
+ }
+ """,
+ [],
+ [bad_argument("complexArg", "ComplexInput", "SIT", 3, [])]
+ )
+ end
+
+ test "Not an input object, a string" do
+ assert_fails_validation(
+ """
+ {
+ complicatedArgs {
+ complexArgField(complexArg: "SIT")
+ }
+ }
+ """,
+ [],
+ [bad_argument("complexArg", "ComplexInput", ~s("SIT"), 3, [])]
+ )
+ end
+
+ test "Not an input object, a number" do
+ assert_fails_validation(
+ """
+ {
+ complicatedArgs {
+ complexArgField(complexArg: 42)
+ }
+ }
+ """,
+ [],
+ [bad_argument("complexArg", "ComplexInput", "42", 3, [])]
+ )
+ end
+
test "Partial object, missing required" do
assert_fails_validation(
"""
diff --git a/test/absinthe/phase/execution/non_null_test.exs b/test/absinthe/phase/execution/non_null_test.exs
index 0895988574..9d62e41fd0 100644
--- a/test/absinthe/phase/execution/non_null_test.exs
+++ b/test/absinthe/phase/execution/non_null_test.exs
@@ -16,6 +16,18 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do
{:ok, %{}}
end
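+
+ # Shared list resolver: returns [nil] when make_null is set, [%{}] otherwise.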
+ defp things_resolver(_, %{make_null: make_null}, _) do
+ if make_null do
+ {:ok, [nil]}
+ else
+ {:ok, [%{}]}
+ end
+ end
+
+ defp things_resolver(_, _, _) do
+ {:ok, [%{}]}
+ end
+
object :thing do
field :nullable, :thing do
arg :make_null, :boolean
@@ -38,6 +50,11 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do
{:error, "boom"}
end
end
+
+ field :non_null_list_of_non_null, non_null(list_of(non_null(:thing))) do
+ arg :make_null, :boolean
+ resolve &things_resolver/3
+ end
end
query do
@@ -53,21 +70,16 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do
end
field :nullable_list_of_nullable, list_of(:thing) do
- resolve fn _, _ ->
- {:ok, [%{}]}
- end
+ resolve &things_resolver/3
end
field :nullable_list_of_non_null, list_of(non_null(:thing)) do
- resolve fn _, _ ->
- {:ok, [%{}]}
- end
+ resolve &things_resolver/3
end
field :non_null_list_of_non_null, non_null(list_of(non_null(:thing))) do
- resolve fn _, _ ->
- {:ok, [%{}]}
- end
+ arg :make_null, :boolean
+ resolve &things_resolver/3
end
@desc """
@@ -250,5 +262,66 @@ defmodule Absinthe.Phase.Document.Execution.NonNullTest do
assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema)
end
+
+ test "list of non null things works when child is null" do
+ doc = """
+ {
+ nonNullListOfNonNull(makeNull: true) { __typename }
+ }
+ """
+
+ data = nil
+
+ errors = [
+ %{
+ locations: [%{column: 3, line: 2}],
+ message: "Cannot return null for non-nullable field",
+ path: ["nonNullListOfNonNull", 0]
+ }
+ ]
+
+ assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema)
+ end
+
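+ # Per the GraphQL spec, a null in a non-null position propagates up to the
+ # nearest nullable ancestor, which with nested non-null lists can sit several
+ # levels above the field that produced the null.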
+ test "returning null from a non null list makes the parent nullable null at arbitrary depth" do
+ doc = """
+ {
+ nullableListOfNonNull {
+ nonNullListOfNonNull {
+ nonNullListOfNonNull {
+ nonNullListOfNonNull {
+ nonNullListOfNonNull(makeNull: true) { __typename }
+ }
+ }
+ }
+ }
+ }
+ """
+
+ data = %{"nullableListOfNonNull" => nil}
+
+ path = [
+ "nullableListOfNonNull",
+ 0,
+ "nonNullListOfNonNull",
+ 0,
+ "nonNullListOfNonNull",
+ 0,
+ "nonNullListOfNonNull",
+ 0,
+ "nonNullListOfNonNull",
+ 0
+ ]
+
+ errors = [
+ %{
+ locations: [%{column: 11, line: 6}],
+ message: "Cannot return null for non-nullable field",
+ path: path
+ }
+ ]
+
+ assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema)
+ end
end
end
diff --git a/test/absinthe/phase/parse/const_usage_test.exs b/test/absinthe/phase/parse/const_usage_test.exs
index faaca97086..4bae0a3145 100644
--- a/test/absinthe/phase/parse/const_usage_test.exs
+++ b/test/absinthe/phase/parse/const_usage_test.exs
@@ -4,6 +4,18 @@ defmodule Absinthe.Phase.Parse.ConstUsageTest do
@moduletag :parser
describe "composed constants" do
+ test "list in a constant location can be empty " do
+ result =
+ """
+ schema @feature(name: []){
+ query: Query
+ }
+ """
+ |> run
+
+ assert {:ok, _} = result
+ end
+
test "list in a constant location cannot contain variables " do
result =
"""
@@ -181,7 +193,9 @@ defmodule Absinthe.Phase.Parse.ConstUsageTest do
end
def run(input) do
- {:error, blueprint} = Absinthe.Phase.Parse.run(input)
- {:error, blueprint.execution.validation_errors}
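+ # Pass successful parses through as well: the empty-list test above
+ # expects an {:ok, _} result rather than validation errors.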
+ case Absinthe.Phase.Parse.run(input) do
+ {:error, blueprint} -> {:error, blueprint.execution.validation_errors}
+ {:ok, blueprint} -> {:ok, blueprint}
+ end
end
end
diff --git a/test/absinthe/schema/manipulation_test.exs b/test/absinthe/schema/manipulation_test.exs
index 674086a2f5..ce1066d17f 100644
--- a/test/absinthe/schema/manipulation_test.exs
+++ b/test/absinthe/schema/manipulation_test.exs
@@ -25,7 +25,7 @@ defmodule Absinthe.Schema.ManipulationTest do
description "Simple Helper Object used to define blueprint fields"
field :simple_string, :string do
- description "customer introspection field"
+ description "custom introspection field"
resolve fn _, %{schema: schema} ->
{:ok, "This is a new introspection type on #{inspect(schema)}"}
@@ -39,7 +39,7 @@ defmodule Absinthe.Schema.ManipulationTest do
%{
source: source
} ->
- private = source[:__private__] || []
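+ # `source` is a struct here, and structs do not implement the Access
+ # behaviour, so `source[:__private__]` would raise.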
+ private = source.__private__ || []
meta_items = private[:meta] || []
{:ok, meta_items[:some_string_meta]}
diff --git a/test/absinthe/schema/notation/experimental/macro_extensions_test.exs b/test/absinthe/schema/notation/experimental/macro_extensions_test.exs
index 39fdaa21af..917b2a168f 100644
--- a/test/absinthe/schema/notation/experimental/macro_extensions_test.exs
+++ b/test/absinthe/schema/notation/experimental/macro_extensions_test.exs
@@ -8,8 +8,13 @@ defmodule Absinthe.Schema.Notation.Experimental.MacroExtensionsTest do
defmodule WithFeatureDirective do
use Absinthe.Schema.Prototype
+ input_object :related_feature do
+ field :name, :string
+ end
+
directive :feature do
arg :name, :string
+ arg :related_features, list_of(:related_feature)
on [:scalar, :schema]
expand(fn _args, node ->
@@ -28,7 +33,7 @@ defmodule Absinthe.Schema.Notation.Experimental.MacroExtensionsTest do
end
extend schema do
- directive :feature
+ directive :feature, related_features: [%{"name" => "another_feature"}]
end
object :person do
@@ -237,6 +242,12 @@ defmodule Absinthe.Schema.Notation.Experimental.MacroExtensionsTest do
assert [:valued_entity] = object.interfaces
end
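+
+ # The plain map passed to @feature in the schema extension should render in
+ # SDL as an input object literal.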
+ test "can use map in arguments" do
+ sdl = Absinthe.Schema.to_sdl(ExtendedSchema)
+
+ assert sdl =~ "schema @feature(related_features: [{name: \"another_feature\"}])"
+ end
+
test "raises when definition types do not match" do
schema = """
defmodule KeywordExtend do
diff --git a/test/absinthe/schema/rule/object_must_define_fields_test.exs b/test/absinthe/schema/rule/object_must_define_fields_test.exs
index ca70303fca..5870b47351 100644
--- a/test/absinthe/schema/rule/object_must_define_fields_test.exs
+++ b/test/absinthe/schema/rule/object_must_define_fields_test.exs
@@ -121,4 +121,24 @@ defmodule Absinthe.Schema.Rule.ObjectMustDefineFieldsTest do
Code.eval_string(@schema)
end)
end
+
+ @schema ~S(
+ defmodule ExtendObjectSchema do
+ use Absinthe.Schema
+
+ query do
+ field :foo, :string
+ end
+
+ object :bar do
+ field :baz, :string
+ end
+
+ extend object :bar do
+ end
+ end
+ )
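+
+ # An empty `extend object` block must not trigger the
+ # "objects must define fields" validation.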
+ test "does not error on empty object extension" do
+ assert Code.eval_string(@schema)
+ end
end
diff --git a/test/absinthe/schema/rule/unique_field_names_test.exs b/test/absinthe/schema/rule/unique_field_names_test.exs
index 8e4415ab77..a62329e53f 100644
--- a/test/absinthe/schema/rule/unique_field_names_test.exs
+++ b/test/absinthe/schema/rule/unique_field_names_test.exs
@@ -1,7 +1,21 @@
defmodule Absinthe.Schema.Rule.UniqueFieldNamesTest do
use Absinthe.Case, async: true
- @duplicate_object_fields ~S(
+ @duplicate_object_fields_macro ~S(
+ defmodule DuplicateObjectFields do
+ use Absinthe.Schema
+
+ query do
+ end
+
+ object :dog do
+ field :name, :string
+ field :name, :integer, name: "dogName"
+ end
+ end
+ )
+
+ @duplicate_object_fields_sdl ~S(
defmodule DuplicateObjectFields do
use Absinthe.Schema
@@ -49,11 +63,19 @@ defmodule Absinthe.Schema.Rule.UniqueFieldNamesTest do
end
)
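+
+ # @duplicate_object_fields_macro defines :name twice with distinct GraphQL
+ # names ("name" and "dogName"); the duplicated identifier alone must raise.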
+ test "errors on non unique object field identifier" do
+ error = ~r/The field identifier :name is not unique in type \"Dog\"./
+
+ assert_raise(Absinthe.Schema.Error, error, fn ->
+ Code.eval_string(@duplicate_object_fields_macro)
+ end)
+ end
+
test "errors on non unique object field names" do
error = ~r/The field \"name\" is not unique in type \"Dog\"./
assert_raise(Absinthe.Schema.Error, error, fn ->
- Code.eval_string(@duplicate_object_fields)
+ Code.eval_string(@duplicate_object_fields_sdl)
end)
end
diff --git a/test/absinthe/type/interface_test.exs b/test/absinthe/type/interface_test.exs
index 59cd4f9ffa..43393fb1ce 100644
--- a/test/absinthe/type/interface_test.exs
+++ b/test/absinthe/type/interface_test.exs
@@ -316,4 +316,129 @@ defmodule Absinthe.Type.InterfaceTest do
test "resolved type of nested interfaces" do
assert_data(%{"root" => %{"__typename" => "ZChild"}}, run(@graphql, NestedInterfacesSchema))
end
+
+ defmodule PrivacyUsingNestedSchema do
+ @moduledoc """
+ Schema that returns a public or private view of collections depending on
+ auth status, and of items depending on their parent collection. PrivateItem
+ is returned for items in a PrivateCollection.
+ """
+ use Absinthe.Schema
+
+ @data [
+ %{
+ name: "Travel Books",
+ items: [
+ %{title: "To the moon and back", content: "How to build a rocket"},
+ %{title: "Ends of the world", content: "How it looks in the end?"}
+ ]
+ },
+ %{
+ name: "Cuisine",
+ items: [
+ %{title: "Polish soups", content: "All on pomidorowa soup"},
+ %{title: "Only sweets", content: "Carb diet for the win!"}
+ ]
+ }
+ ]
+
+ def data(), do: @data
+
+ query do
+ field :collections, list_of(:collection) do
+ resolve fn _, _, _ ->
+ {:ok, @data}
+ end
+ end
+ end
+
+ interface :collection do
+ description "A collection"
+ field :name, non_null(:string)
+
+ resolve_type fn _value, %{context: %{auth: is_auth}} ->
+ if is_auth, do: :private_collection, else: :public_collection
+ end
+ end
+
+ object :public_collection do
+ interface :collection
+ import_fields :collection
+ end
+
+ object :private_collection do
+ interface :collection
+ import_fields :collection
+ field :items, list_of(:item)
+ end
+
+ interface :item do
+ description "An item"
+ field :title, non_null(:string)
+
+ resolve_type fn _value, %{path: path} ->
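+ # The execution path is innermost-first: the item's index in its list, the
+ # "items" field, the collection's index, then the root "collections" field.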
+ assert [
+ idx,
+ %{name: "items", parent_type: %{identifier: parent_id}},
+ outer_idx,
+ %{name: "collections"} | _
+ ] = path
+
+ assert idx in 0..1
+ assert outer_idx in 0..1
+ assert parent_id == :private_collection
+
+ if parent_id == :private_collection, do: :private_item, else: :public_item
+ end
+ end
+
+ object :public_item do
+ interface :item
+ import_fields :item
+ end
+
+ object :private_item do
+ interface :item
+ import_fields :item
+ field :content, :string
+ end
+ end
+
+ # Deep-convert keys from atoms to strings. Is a helper for this available somewhere in the library?
+ defp stringify_keys(v) when is_list(v) do
+ Enum.map(v, &stringify_keys/1)
+ end
+
+ defp stringify_keys(v) when is_map(v) do
+ Map.new(v, fn {k, val} -> {Atom.to_string(k), stringify_keys(val)} end)
+ end
+
+ defp stringify_keys(v) do
+ v
+ end
+
+ @graphql """
+ query books {
+ collections {
+ name
+ ... on PrivateCollection {
+ items {
+ ... on PrivateItem {
+ title content
+ }
+ }
+ }
+ }
+ }
+ """
+ test "Nested interface resolution passes correct data to resolve_type" do
+ stringified_data = stringify_keys(PrivacyUsingNestedSchema.data())
+
+ assert_data(
+ %{"collections" => stringified_data},
+ run(@graphql, PrivacyUsingNestedSchema, context: %{auth: true})
+ )
+ end
end