Skip to content

Instantly share code, notes, and snippets.

@moxley
Created September 23, 2018 05:34
Show Gist options
  • Save moxley/fbc88f44b47f7c1978574ad6453e6971 to your computer and use it in GitHub Desktop.
Save moxley/fbc88f44b47f7c1978574ad6453e6971 to your computer and use it in GitHub Desktop.
Concatenated ball of Elixir
This file has been truncated, but you can view the full file.
defmodule Absinthe.Mixfile do
  use Mix.Project

  @version "1.5.0-dev"

  # Standard Mix project configuration (see `Mix.Project`).
  def project do
    [
      app: :absinthe,
      version: @version,
      elixir: "~> 1.4",
      elixirc_paths: elixirc_paths(Mix.env()),
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      package: package(),
      source_url: "https://github.com/absinthe-graphql/absinthe",
      docs: docs(),
      deps: deps()
    ]
  end

  # Hex package metadata.
  defp package do
    [
      description: "GraphQL for Elixir",
      files: ["lib", "src", "priv", "mix.exs", "README.md", "CHANGELOG.md", ".formatter.exs"],
      maintainers: [
        "Bruce Williams",
        "Ben Wilson"
      ],
      licenses: ["MIT"],
      links: %{
        Website: "https://absinthe-graphql.org",
        Changelog: "https://github.com/absinthe-graphql/absinthe/blob/master/CHANGELOG.md",
        GitHub: "https://github.com/absinthe-graphql/absinthe"
      }
    ]
  end

  # Compile test-support modules only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # OTP application configuration.
  def application do
    [applications: [:logger]]
  end

  # Dependencies. `optional: true` entries are integrations users opt into.
  defp deps do
    [
      {:nimble_parsec, "~> 0.4", override: true},
      {:dataloader, "~> 1.0.0", optional: true},
      {:ex_doc, "~> 0.14", only: :dev},
      {:benchfella, "~> 0.3.0", only: :dev},
      {:dialyze, "~> 0.2", only: :dev},
      {:decimal, "~> 1.0", optional: true},
      {:phoenix_pubsub, ">= 0.0.0", only: :test},
      {:mix_test_watch, "~> 0.4.1", only: [:test, :dev]}
    ]
  end

  #
  # Documentation
  #

  # ExDoc configuration (extracted from project/0 for readability;
  # the resulting keyword list is identical).
  defp docs do
    [
      source_ref: "v#{@version}",
      main: "overview",
      logo: "logo.png",
      extra_section: "GUIDES",
      assets: "guides/assets",
      formatters: ["html", "epub"],
      groups_for_modules: groups_for_modules(),
      extras: extras(),
      groups_for_extras: groups_for_extras()
    ]
  end

  # Guide pages bundled into the generated documentation.
  defp extras do
    [
      "guides/introduction/overview.md",
      "guides/introduction/installation.md",
      "guides/introduction/learning.md",
      "guides/introduction/community.md",
      "guides/tutorial/start.md",
      "guides/tutorial/our-first-query.md",
      "guides/tutorial/query-arguments.md",
      "guides/tutorial/mutations.md",
      "guides/tutorial/complex-arguments.md",
      "guides/tutorial/conclusion.md",
      "guides/schemas.md",
      "guides/plug-phoenix.md",
      "guides/ecto.md",
      "guides/middleware-and-plugins.md",
      "guides/errors.md",
      "guides/batching.md",
      "guides/dataloader.md",
      "guides/context-and-authentication.md",
      "guides/subscriptions.md",
      "guides/custom-scalars.md",
      "guides/importing-types.md",
      "guides/importing-fields.md",
      "guides/variables.md",
      "guides/introspection.md",
      "guides/deprecation.md",
      "guides/adapters.md",
      "guides/complexity-analysis.md",
      "guides/file-uploads.md",
      "guides/client/javascript.md",
      "guides/client/apollo.md",
      "guides/client/relay.md",
      "guides/upgrading/v1.4.md"
    ]
  end

  # Sidebar grouping for the guide pages, keyed by path pattern.
  defp groups_for_extras do
    [
      Introduction: ~r/guides\/introduction\/.*/,
      Tutorial: ~r/guides\/tutorial\/.*/,
      Topics: ~r/guides\/[^\/]+\.md/,
      "Client Guides": ~r/guides\/client\/.*/,
      "Upgrade Guides": ~r/guides\/upgrading\/.*/
    ]
  end

  # Sidebar grouping for modules in the generated docs.
  defp groups_for_modules do
    # Ungrouped:
    # - Absinthe
    [
      "Schema Definition and Types": [
        Absinthe.Schema,
        Absinthe.Schema.Notation,
        Absinthe.Resolution.Helpers,
        Absinthe.Type,
        Absinthe.Type.Custom,
        Absinthe.Type.Argument,
        Absinthe.Type.Custom,
        Absinthe.Type.Directive,
        Absinthe.Type.Enum,
        Absinthe.Type.Enum.Value,
        Absinthe.Type.Field,
        Absinthe.Type.InputObject,
        Absinthe.Type.Interface,
        Absinthe.Type.List,
        Absinthe.Type.NonNull,
        Absinthe.Type.Object,
        Absinthe.Type.Scalar,
        Absinthe.Type.Union
      ],
      "Middleware and Plugins": [
        Absinthe.Middleware,
        Absinthe.Plugin,
        Absinthe.Middleware.Async,
        Absinthe.Middleware.Batch,
        Absinthe.Middleware.Dataloader,
        Absinthe.Middleware.MapGet,
        Absinthe.Middleware.PassParent
      ],
      Subscriptions: [
        Absinthe.Subscription,
        Absinthe.Subscription.Pubsub
      ],
      Extensibility: [
        Absinthe.Pipeline,
        Absinthe.Phase,
        Absinthe.Phase.Validation.Helpers,
        Absinthe.Pipeline.ErrorResult
      ],
      "Document Adapters": [
        Absinthe.Adapter,
        Absinthe.Adapter.LanguageConventions,
        Absinthe.Adapter.Passthrough,
        Absinthe.Adapter.Underscore
      ],
      Execution: [
        Absinthe.Blueprint,
        Absinthe.Blueprint.Execution,
        Absinthe.Traversal,
        Absinthe.Resolution,
        Absinthe.Complexity
      ],
      Introspection: [
        Absinthe.Introspection
      ],
      Testing: [
        Absinthe.Test
      ],
      Utilities: [
        Absinthe.Logger,
        Absinthe.Utils,
        Absinthe.Utils.Suggestion
      ]
    ]
  end
end
defmodule Absinthe.ValidationPhaseCase do
  @moduledoc """
  Test-support helpers for exercising a single validation phase in isolation.

  `use Absinthe.ValidationPhaseCase, phase: SomePhase` injects assertion
  helpers (`assert_passes_validation/2`, `assert_fails_validation/3`, and
  schema-specific variants) plus `bad_value/4` for building error checkers.
  """

  import ExUnit.Assertions

  alias Absinthe.{Blueprint, Schema, Phase, Pipeline, Language}

  # An error checker receives every {node, error} pair collected from the
  # blueprint and asserts on them.
  @type error_checker_t :: ([{Blueprint.t(), Blueprint.Error.t()}] -> boolean)

  @doc """
  Format the expected error location(s) for assertion failure messages.

  `line` may be a single line number, a list of line numbers, or `nil`
  (meaning "any line").
  """
  def get_error_location(line) do
    case List.wrap(line) do
      # `List.wrap(nil)` is `[]`, so nil must be handled via the empty list.
      # (The previous `nil ->` clause was unreachable, and nil fell into the
      # list clause, producing the wrong "(from lines #)" message.)
      [] ->
        "(at any line number)"

      [single] ->
        "(from line ##{single})"

      multiple when is_list(multiple) ->
        numbers = multiple |> Enum.join(", #")
        "(from lines ##{numbers})"
    end
  end

  defmacro __using__(opts) do
    phase = Keyword.fetch!(opts, :phase)

    quote do
      use Absinthe.Case, unquote(opts)
      import unquote(__MODULE__)

      # Build an error-checker asserting that some {node, error} pair matches
      # the given node kind (struct module), message, line(s), and extra
      # field checks (`check` is a keyword list of fields or a predicate fn).
      def bad_value(node_kind, message, line, check \\ []) do
        location = unquote(__MODULE__).get_error_location(line)

        expectation_banner =
          "\nExpected #{node_kind} node with error #{location}:\n---\n#{message}\n---"

        check_fun = node_check_function(check)

        fn pairs ->
          assert !Enum.empty?(pairs), "No errors were found.\n#{expectation_banner}"

          matched =
            Enum.any?(pairs, fn
              # Node must be the expected struct kind, carrying an error from
              # the phase under test with the expected message.
              {%str{} = node, %Phase.Error{phase: unquote(phase), message: ^message} = err}
              when str == node_kind ->
                if check_fun.(node) do
                  if !line do
                    true
                  else
                    # Every expected line must appear among the error's
                    # reported locations.
                    List.wrap(line)
                    |> Enum.all?(fn l ->
                      Enum.any?(err.locations, fn
                        %{line: ^l} ->
                          true

                        _ ->
                          false
                      end)
                    end)
                  end
                else
                  false
                end

              _ ->
                false
            end)

          formatted_errors =
            Enum.map(pairs, fn {_, error} ->
              "#{error.message} (from line #{inspect(error.locations)})"
            end)

          assert matched,
                 "Could not find error.\n#{expectation_banner}\n\n Did find these errors...\n ---\n " <>
                   Enum.join(formatted_errors, "\n ") <> "\n ---"
        end
      end

      # Turn a keyword list of expected node fields into a predicate;
      # pass a supplied predicate function through unchanged.
      defp node_check_function(check) when is_list(check) do
        fn node ->
          Enum.all?(check, fn {key, value} -> Map.get(node, key) == value end)
        end
      end

      defp node_check_function(check) when is_function(check) do
        check
      end

      # (Fixed typo: this spec previously referenced `PhaseLanguage.Source.t()`.)
      @spec assert_passes_validation(Language.Source.t(), map) :: no_return
      def assert_passes_validation(document, options) do
        assert_valid(Absinthe.Fixtures.PetsSchema, [unquote(phase)], document, options)
      end

      @spec assert_fails_validation(
              Language.Source.t(),
              map,
              [Absinthe.ValidationPhaseCase.error_checker_t()]
              | Absinthe.ValidationPhaseCase.error_checker_t()
            ) :: no_return
      def assert_fails_validation(document, options, error_checker) do
        assert_invalid(
          Absinthe.Fixtures.PetsSchema,
          [unquote(phase)],
          document,
          options,
          error_checker
        )
      end

      @spec assert_passes_validation_with_schema(Schema.t(), Language.Source.t(), map) ::
              no_return
      def assert_passes_validation_with_schema(schema, document, options) do
        assert_valid(schema, [unquote(phase)], document, options)
      end

      @spec assert_fails_validation_with_schema(
              Schema.t(),
              Language.Source.t(),
              map,
              Absinthe.ValidationPhaseCase.error_checker_t()
            ) :: no_return
      def assert_fails_validation_with_schema(schema, document, options, error_checker) do
        assert_invalid(schema, [unquote(phase)], document, options, error_checker)
      end
    end
  end

  @doc """
  Assert `document` passes the given validation phases against `schema`.
  """
  @spec assert_valid(Schema.t(), [Phase.t()], Language.Source.t(), map) :: no_return
  def assert_valid(schema, validations, document, options) do
    result =
      case run(schema, validations, document, options) do
        # Pipeline.run/2 returns {:ok, result, phases}; match the 3-tuple,
        # consistent with assert_invalid/5. (The previous `{:ok, result}`
        # 2-tuple clause could never match and only worked because the
        # catch-all below absorbed the success case.)
        {:ok, result, _} ->
          result

        # :jump, etc
        {_other, result, _config} ->
          result
      end

    formatted_errors =
      result
      |> error_pairs()
      |> Enum.map(fn {_, error} ->
        error.message
      end)

    assert Enum.empty?(formatted_errors),
           "Expected no errors, found:\n ---\n " <>
             Enum.join(formatted_errors, "\n ") <> "\n ---"
  end

  @doc """
  Assert `document` fails the given validation phases against `schema`,
  running each checker over the collected {node, error} pairs.
  """
  @spec assert_invalid(
          Schema.t(),
          [Phase.t()],
          Language.Source.t(),
          map,
          [error_checker_t] | error_checker_t
        ) :: no_return
  def assert_invalid(schema, validations, document, options, error_checkers) do
    result =
      case run(schema, validations, document, options) do
        {:ok, result, _} ->
          result

        # :jump, etc
        {_other, result, _config} ->
          result
      end

    pairs = error_pairs(result)

    List.wrap(error_checkers)
    |> Enum.each(& &1.(pairs))
  end

  defp run(schema, validations, document, options) do
    pipeline = pre_validation_pipeline(schema, validations, options)
    Pipeline.run(document, pipeline)
  end

  # Schema validation: run the schema pipeline up to Phase.Schema, then
  # append the validation phases under test.
  defp pre_validation_pipeline(schema, validations, :schema) do
    Pipeline.for_schema(schema)
    |> Pipeline.upto(Phase.Schema)
    |> Kernel.++(validations)
  end

  # Document validation: run the document pipeline up to the validation
  # Result phase, dropping every validation phase except those under test.
  defp pre_validation_pipeline(schema, validations, options) do
    options = Keyword.put(options, :jump_phases, false)

    Pipeline.for_document(schema, options)
    |> Pipeline.upto(Phase.Document.Validation.Result)
    |> Pipeline.reject(fn phase ->
      Regex.match?(~r/Validation/, Atom.to_string(phase)) and
        not (phase in [Phase.Document.Validation.Result | validations])
    end)
  end

  # Build a list of blueprint nodes that carry errors.
  defp nodes_with_errors(input) do
    {_, errors} = Blueprint.prewalk(input, [], &do_nodes_with_errors/2)
    errors
  end

  # Flatten nodes-with-errors into {node, error} pairs.
  defp error_pairs(input) do
    input
    |> nodes_with_errors()
    |> Enum.flat_map(fn %{errors: errors} = node ->
      Enum.map(errors, &{node, &1})
    end)
  end

  # Nodes carrying a `raw` sub-tree are walked through their raw form.
  defp do_nodes_with_errors(%{raw: raw} = node, acc) do
    {_, errors} = Blueprint.prewalk(raw, acc, &do_nodes_with_errors/2)
    {node, errors}
  end

  defp do_nodes_with_errors(%{errors: []} = node, acc) do
    {node, acc}
  end

  defp do_nodes_with_errors(%{errors: _} = node, acc) do
    {node, [node | acc]}
  end

  defp do_nodes_with_errors(node, acc) do
    {node, acc}
  end
end
defmodule Absinthe.PhaseCase do
# Test-support module. `use Absinthe.PhaseCase, phase: ..., schema: ...`
# injects a `run_phase/2` helper that runs the document pipeline for the
# configured schema up to (and including) the configured phase.
defmacro __using__(opts) do
phase = Keyword.fetch!(opts, :phase)
schema = Keyword.fetch!(opts, :schema)
quote do
# NOTE(review): this @doc precedes `use Absinthe.Case` rather than the
# `run_phase/2` definition; it may attach to code injected by that `use`.
# Confirm the intended placement.
@doc """
Execute the pipeline up to and through a phase.
"""
use Absinthe.Case, unquote(opts)
@spec run_phase(String.t(), Keyword.t()) :: Absinthe.Phase.result_t()
def run_phase(query, options) do
# Disable phase jumping so the pipeline stops exactly at the target
# phase; complexity analysis defaults to enabled.
options =
options
|> Keyword.put(:jump_phases, false)
|> Keyword.put_new(:analyze_complexity, true)
pipeline = Absinthe.Pipeline.for_document(unquote(schema), options)
Absinthe.Pipeline.run(query, pipeline |> Absinthe.Pipeline.upto(unquote(phase)))
end
end
end
end
defmodule ExperimentalNotationHelpers do
  @moduledoc false

  alias Absinthe.Blueprint

  # Look a type up in the module's (pre-compilation) schema blueprint.
  def lookup_type(mod, type_ident) do
    Blueprint.Schema.lookup_type(mod.__absinthe_blueprint__(), type_ident)
  end

  # Look a type up in the compiled schema.
  def lookup_compiled_type(mod, type_ident) do
    Absinthe.Schema.lookup_type(mod, type_ident)
  end

  # Find a field by identifier on a blueprint type; nil when absent.
  def lookup_field(mod, type_ident, field_ident) do
    type = Blueprint.Schema.lookup_type(mod.__absinthe_blueprint__(), type_ident)
    Enum.find(type.fields, &match?(%{identifier: ^field_ident}, &1))
  end

  # Find a field by identifier on a compiled type; nil when the type or
  # field is absent.
  def lookup_compiled_field(mod, type_ident, field_ident) do
    case Absinthe.Schema.lookup_type(mod, type_ident) do
      nil -> nil
      type -> type.fields[field_ident]
    end
  end

  # Count the types declared by the first schema definition in the blueprint.
  def type_count(mod) do
    mod.__absinthe_blueprint__()
    |> Map.fetch!(:schema_definitions)
    |> List.first()
    |> Map.fetch!(:types)
    |> length()
  end
end
defmodule Absinthe.Fixtures.IdTestSchema do
# Fixture schema: a single `item(id:)` query that resolves from a
# hard-coded map, used to exercise the :id scalar type.
use Absinthe.Schema
# Example data
@items %{
"foo" => %{id: "foo", name: "Foo"},
"bar" => %{id: "bar", name: "Bar"}
}
query do
field :item,
type: :item,
args: [
id: [type: non_null(:id)]
],
resolve: fn %{id: item_id}, _ ->
{:ok, @items[item_id]}
end
end
object :item do
description "An item"
field :id, :id
field :name, :string
end
end
defmodule Absinthe.Fixtures.SchemaWithDuplicateNames do
# Intentionally invalid fixture: `:person` and `:another_person` both
# compile to the GraphQL type name "Person", to exercise duplicate-name
# schema validation. Do not "fix" the duplication.
use Absinthe.Schema
query do
# Query type must exist
end
object :person do
description "A person"
field :name, :string
end
object :another_person, name: "Person" do
description "A person"
field :type, :string
end
end
defmodule Absinthe.Fixtures.ValidSchema do
# Minimal valid fixture schema: an empty query type plus one object type.
use Absinthe.Schema
query do
# Query type must exist
end
object :person do
description "A person"
field :name, :string
end
end
defmodule Absinthe.Fixtures.SchemaWithDuplicateIdentifiers do
# Intentionally invalid fixture: the identifier `:person` is declared
# twice (with different GraphQL names) to exercise duplicate-identifier
# schema validation. Do not "fix" the duplication.
use Absinthe.Schema
query do
# Query type must exist
end
object :person do
description "A person"
field :name, :string
end
object :person, name: "APersonToo" do
description "A person"
field :name, :string
end
end
defmodule Absinthe.Fixtures.ImportTypes do
# Fixture exercising `import_types`: type definitions are spread across
# several Notation modules and imported into `Schema` below using the
# different supported call styles.
defmodule AccountTypes do
use Absinthe.Schema.Notation
object :customer do
field :id, non_null(:id)
field :name, :string
field :mailing_address, :mailing_address
field :contact_methods, list_of(:contact_method)
end
object :employee do
field :id, non_null(:id)
field :name, :string
field :avatar, :avatar
field :weekly_schedules, list_of(:weekly_schedule)
end
end
defmodule OrderTypes do
use Absinthe.Schema.Notation
object :order do
field :id, non_null(:id)
field :customer, non_null(:customer)
field :receipt, non_null(:receipt)
end
end
defmodule ReceiptTypes do
use Absinthe.Schema.Notation
object :receipt do
field :id, non_null(:id)
field :code, non_null(:string)
end
end
defmodule ScheduleTypes do
use Absinthe.Schema.Notation
object :weekly_schedule do
field :id, non_null(:id)
field :employee, non_null(:employee)
end
end
defmodule ProfileTypes do
use Absinthe.Schema.Notation
object :mailing_address do
field :street, non_null(list_of(:string))
field :city, non_null(:string)
field :state, non_null(:string)
field :postal_code, non_null(:string)
end
end
defmodule AuthTypes do
use Absinthe.Schema.Notation
object :contact_method do
field :kind, non_null(:contact_kind)
field :value, non_null(:string)
end
enum :contact_kind, values: [:email, :phone]
end
defmodule Shared.AvatarTypes do
use Absinthe.Schema.Notation
object :avatar do
field :height, non_null(:integer)
field :width, non_null(:integer)
field :url, non_null(:string)
end
end
# The schema imports the modules above via: fully-qualified multi-alias,
# fully-qualified single module, and alias-based (single and multi) forms.
defmodule Schema do
use Absinthe.Schema
import_types Absinthe.Fixtures.ImportTypes.{AccountTypes, OrderTypes}
import_types Absinthe.Fixtures.ImportTypes.ReceiptTypes
alias Absinthe.Fixtures.ImportTypes
import_types ImportTypes.ScheduleTypes
import_types ImportTypes.{ProfileTypes, AuthTypes, Shared.AvatarTypes}
query do
field :orders, list_of(:order)
field :employees, list_of(:employee)
field :customers, list_of(:customer)
end
end
end
defmodule Absinthe.Fixtures.PrefixSchema do
# Fixture using `__`-prefixed names (normally reserved for GraphQL
# introspection) on fields, args, objects, and directives, to exercise
# reserved-name handling.
use Absinthe.Schema
query do
field :foo, :integer do
arg :bar, :string
end
field :__mything,
name: "__mything",
type: :string,
args: [
__myarg: [type: :integer]
],
resolve: fn _, _ ->
{:ok, %{name: "Test"}}
end
end
object :__mything, name: "__MyThing" do
field :name, :string
end
# NOTE(review): `on Language.*` and `instruction` are the older directive
# notation; confirm against the Absinthe version in use.
directive :__mydirective do
arg :__if, non_null(:boolean), description: "Skipped when true."
on Language.FragmentSpread
on Language.Field
on Language.InlineFragment
instruction fn
%{if: true} ->
:skip
_ ->
:include
end
end
end
defmodule Absinthe.TestSupport.Schemas.BadTypesSchema do
# Intentionally broken fixture: it references type identifiers that are
# never defined (`:input_thing_bad`, `:thing_bagel`, `:integer_bbbbbb`)
# to exercise unknown-type schema validation errors. Do not "fix" them.
use Absinthe.Schema
@db %{
"foo" => %{id: "foo", name: "Foo", value: 4},
"bar" => %{id: "bar", name: "Bar", value: 5}
}
mutation do
field :update_thing,
type: :thing,
args: [
id: [type: non_null(:string)],
# :input_thing_bad is intentionally not defined in this schema.
thing: [type: non_null(:input_thing_bad)]
],
resolve: fn
%{id: id, thing: %{value: val}}, _ ->
found = @db |> Map.get(id)
{:ok, %{found | value: val}}
%{id: id, thing: fields}, _ ->
found = @db |> Map.get(id)
{:ok, found |> Map.merge(fields)}
end
end
query do
field :version, :string
# Returns a bare atom instead of an {:ok, _}/{:error, _} tuple, to
# exercise bad-resolution error handling.
field :bad_resolution,
type: :thing,
resolve: fn _, _ ->
:not_expected
end
field :number,
type: :string,
args: [
val: [type: non_null(:integer)]
],
resolve: fn
%{val: v}, _ -> {:ok, v |> to_string}
args, _ -> {:error, "got #{inspect(args)}"}
end
field :thing_by_context,
type: :thing,
resolve: fn
_, %{context: %{thing: id}} ->
{:ok, @db |> Map.get(id)}
_, _ ->
{:error, "No :id context provided"}
end
field :thing,
# :thing_bagel is intentionally not defined in this schema.
type: :thing_bagel,
args: [
id: [
description: "id of the thing",
type: non_null(:string)
],
deprecated_arg: [
description: "This is a deprecated arg",
type: :string,
deprecate: true
],
deprecated_non_null_arg: [
description: "This is a non-null deprecated arg",
type: non_null(:string),
deprecate: true
],
deprecated_arg_with_reason: [
description: "This is a deprecated arg with a reason",
type: :string,
deprecate: "reason"
],
deprecated_non_null_arg_with_reason: [
description: "This is a non-null deprecated arg with a reasor",
type: non_null(:string),
deprecate: "reason"
]
],
resolve: fn %{id: id}, _ ->
{:ok, @db |> Map.get(id)}
end
field :deprecated_thing,
type: :thing,
args: [
id: [
description: "id of the thing",
type: non_null(:string)
]
],
resolve: fn %{id: id}, _ ->
{:ok, @db |> Map.get(id)}
end,
deprecate: true
field :deprecated_thing_with_reason,
type: :thing,
args: [
id: [
description: "id of the thing",
type: non_null(:string)
]
],
deprecate: "use `thing' instead",
resolve: fn %{id: id}, _ ->
{:ok, @db |> Map.get(id)}
end
end
input_object :input_thing do
description "A thing as input"
# :integer_bbbbbb is intentionally not defined in this schema.
field :value, :integer_bbbbbb
field :deprecated_field, :string, deprecate: true
field :deprecated_field_with_reason, :string, deprecate: "reason"
field :deprecated_non_null_field, non_null(:string), deprecate: true
field :deprecated_non_null_field_with_reason, :string, deprecate: "reason"
end
object :thing do
description "A thing"
field :id, non_null(:string), description: "The ID of the thing"
field :name, :string, description: "The name of the thing"
field :value, :integer, description: "The value of the thing"
field :other_thing,
type: :thing,
resolve: fn _, %{source: %{id: id}} ->
case id do
"foo" -> {:ok, @db |> Map.get("bar")}
"bar" -> {:ok, @db |> Map.get("foo")}
end
end
end
end
defmodule Absinthe.TestSupport.Schema.BadInterfaceSchema do
# Intentionally broken fixture for interface validation: :foo claims
# interfaces without implementing their fields, :quux lists an object
# (:foo) as an interface, and :spam implements :named without
# is_type_of/resolve_type. Do not "fix" these violations.
use Absinthe.Schema
query do
field :foo, :foo
field :quux, :quux
# NOTE(review): field name :span vs type :spam — possibly a deliberate
# mismatch; confirm against the tests that use this fixture.
field :span, :spam
end
object :foo do
field :not_name, :string
interface :named
interface :aged
is_type_of fn _ ->
true
end
end
object :quux do
field :not_name, :string
interface :foo
is_type_of fn _ ->
true
end
end
object :spam do
field :name, :string
interface :named
end
interface :named do
field :name, :string
end
interface :aged do
field :age, :integer
end
end
defmodule Absinthe.Fixtures.InvalidOutputTypesSchema do
# Intentionally invalid fixture: an input_object field uses an object
# type (:user), which is not a valid input type. Do not "fix".
use Absinthe.Schema
object :user do
end
input_object :foo do
field :blah, :user
end
query do
field :foo, :user do
arg :foo, :foo
end
end
end
defmodule Absinthe.Fixtures.InvalidInputTypesSchema do
# Intentionally invalid fixture: an object field uses an input_object
# type, and an argument uses an object type. Do not "fix".
use Absinthe.Schema
object :user do
end
input_object :input do
end
object :bad_object do
field :blah, :input
end
query do
field :foo, :user do
arg :invalid_arg, :user
end
end
end
defmodule Absinthe.Test.EmptySchema do
# Intentionally empty fixture: no query type is defined, which is
# invalid GraphQL, to exercise schema validation.
use Absinthe.Schema
end
defmodule Absinthe.Fixtures.ColorSchema do
# Fixture exercising enums: values with internal representations (`as:`),
# inline/@desc descriptions, deprecation, and an enum default_value.
use Absinthe.Schema
# Display names keyed by internal channel value.
@names %{
r: "RED",
g: "GREEN",
b: "BLUE",
p: "PUCE"
}
# Numeric values keyed by internal channel value.
@values %{
r: 100,
g: 200,
b: 300,
p: -100
}
query do
field :info,
type: :channel_info,
args: [
channel: [type: non_null(:channel), default_value: :r]
],
resolve: fn %{channel: channel}, _ ->
{:ok, %{name: @names[channel], value: @values[channel]}}
end
end
@desc "A color channel"
enum :channel do
@desc "The color red"
value :red, as: :r
@desc "The color green"
value :green, as: :g
value :blue, description: "The color blue", as: :b
value :puce, description: "The color puce", as: :p, deprecate: "it's ugly"
end
object :channel_info do
description """
Info about a channel
"""
field :name, :string
field :value, :integer
end
input_object :channel_input do
field :channel, :channel, default_value: :r
end
end
defmodule Absinthe.Fixtures.ContactSchema do
# Fixture exercising interfaces, unions, input objects with defaults,
# import_fields, a custom scalar, and deprecation, over hard-coded data.
use Absinthe.Schema
@bruce %{name: "Bruce", age: 35}
@others [
%{name: "Joe", age: 21},
%{name: "Jill", age: 43}
]
@business %{name: "Someplace", employee_count: 11}
query do
field :person,
type: :person,
resolve: fn _, _ ->
{:ok, @bruce}
end
field :contact,
type: :contact,
args: [
business: [type: :boolean, default_value: false]
],
resolve: fn
%{business: false}, _ ->
{:ok, %{entity: @bruce}}
%{business: true}, _ ->
{:ok, %{entity: @business}}
end
field :first_search_result,
type: :search_result,
resolve: fn _, _ ->
{:ok, @bruce}
end
field :search_results,
type: non_null(list_of(non_null(:search_result))),
resolve: fn _, _ ->
{:ok, [@bruce, @business]}
end
field :profile,
type: :person,
args: [name: [type: non_null(:string)]],
resolve: fn
%{name: "Bruce"}, _ ->
{:ok, @bruce}
_, _ ->
{:ok, nil}
end
end
mutation do
field :person,
type: :person,
args: [
profile: [type: :profile_input]
],
# NOTE(review): this resolver takes a single argument; Absinthe
# resolvers are normally arity 2 or 3. Possibly intentional for
# error-handling tests — confirm before changing.
resolve: fn %{profile: profile} ->
# Return it like it's a person
{:ok, profile}
end
end
subscription do
end
input_object :profile_input do
description "The basic details for a person"
field :code, type: non_null(:string)
field :name, type: :string, description: "The person's name", default_value: "Janet"
field :age, type: :integer, description: "The person's age", default_value: 43
end
interface :named_entity do
description "A named entity"
field :name, type: :string
# Concrete type is inferred from the shape of the resolved map.
resolve_type fn
%{age: _}, _ ->
:person
%{employee_count: _}, _ ->
:business
end
end
object :person do
description "A person"
field :name, :string
field :age, :integer
field :address, :string, deprecate: "change of privacy policy"
field :others,
type: list_of(:person),
resolve: fn _, _ ->
{:ok, @others}
end
interface :named_entity
end
object :business do
description "A business"
field :name, :string
field :employee_count, :integer
interface :named_entity
end
union :search_result do
description "A search result"
types [:business, :person]
resolve_type fn
%{age: _}, _ ->
:person
%{employee_count: _}, _ ->
:business
end
end
object :contact do
field :entity, :named_entity
import_fields :contact_method
end
object :contact_method do
field :phone_number, :string
field :address, :string
end
scalar :name do
serialize &to_string/1
parse fn
%Absinthe.Blueprint.Input.String{} = string ->
string.value
_ ->
:error
end
end
# Present but unreachable from the root types; exercises unused-type handling.
object :unused do
field :an_unused_field, :string
end
end
defmodule Absinthe.Fixtures.TimesSchema do
# Fixture: `times(base:, multiplier:)` multiplies its arguments;
# exercises argument default values and 3-arity resolvers.
use Absinthe.Schema
query do
field :times, :integer do
arg :multiplier, :integer, default_value: 2
arg :base, non_null(:integer)
resolve fn
_, %{base: base, multiplier: nil}, _ ->
{:ok, base}
_, %{base: base, multiplier: num}, _ ->
{:ok, base * num}
_, %{base: _}, _ ->
{:error, "Didn't get any multiplier"}
end
end
end
end
defmodule Absinthe.Fixtures.NullListsSchema do
# Fixture exercising every nullability combination for list arguments:
# list_of, non_null(list_of), list_of(non_null), non_null(list_of(non_null)).
# Each field echoes statistics about the received list.
use Absinthe.Schema
query do
field :nullable_list, :list_details do
arg :input, list_of(:integer)
resolve fn
_, %{input: nil}, _ ->
{:ok, nil}
_, %{input: list}, _ ->
{
:ok,
%{
length: length(list),
content: list,
null_count: Enum.count(list, &(&1 == nil)),
non_null_count: Enum.count(list, &(&1 != nil))
}
}
end
end
field :non_nullable_list, :list_details do
arg :input, non_null(list_of(:integer))
resolve fn _, %{input: list}, _ ->
{
:ok,
%{
length: length(list),
content: list,
null_count: Enum.count(list, &(&1 == nil)),
non_null_count: Enum.count(list, &(&1 != nil))
}
}
end
end
field :nullable_list_of_non_nullable_type, :list_details do
arg :input, list_of(non_null(:integer))
resolve fn
_, %{input: nil}, _ ->
{:ok, nil}
_, %{input: list}, _ ->
{
:ok,
%{
length: length(list),
content: list,
null_count: Enum.count(list, &(&1 == nil)),
non_null_count: Enum.count(list, &(&1 != nil))
}
}
end
end
field :non_nullable_list_of_non_nullable_type, :list_details do
arg :input, non_null(list_of(non_null(:integer)))
resolve fn _, %{input: list}, _ ->
{
:ok,
%{
length: length(list),
content: list,
null_count: Enum.count(list, &(&1 == nil)),
non_null_count: Enum.count(list, &(&1 != nil))
}
}
end
end
end
object :list_details do
field :length, :integer
field :content, list_of(:integer)
field :null_count, :integer
field :non_null_count, :integer
end
end
defmodule Absinthe.Fixtures.OnlyQuerySchema do
# Minimal fixture with only a query type (no mutation/subscription).
use Absinthe.Schema
query do
field :hello, :string do
resolve fn _, _ -> {:ok, "world"} end
end
end
end
defmodule Absinthe.Fixtures.ObjectTimesSchema do
# Fixture: like TimesSchema but the arguments arrive wrapped in an
# input_object, exercising input-object field defaults.
use Absinthe.Schema
query do
field :obj_times, :integer do
arg :input, non_null(:times_input)
resolve fn
_, %{input: %{base: base, multiplier: nil}}, _ ->
{:ok, base}
_, %{input: %{base: base, multiplier: num}}, _ ->
{:ok, base * num}
end
end
end
input_object :times_input do
field :multiplier, :integer, default_value: 2
field :base, non_null(:integer)
end
end
defmodule Absinthe.Fixtures.ThingsSchema do
# General-purpose fixture over a two-item in-memory "db"; exercises enums
# (including sigil values), mutations, queries, error-tuple shapes,
# deprecation, and bad resolver returns.
use Absinthe.Schema
@db %{
"foo" => %{id: "foo", name: "Foo", value: 4},
"bar" => %{id: "bar", name: "Bar", value: 5}
}
enum :sigils_work, values: ~w(foo bar)a
enum :sigils_work_inside do
values(~w(foo bar)a)
end
enum :failure_type do
value :multiple
value :with_code
value :without_message
value :multiple_with_code
value :multiple_without_message
end
mutation do
field :update_thing,
type: :thing,
args: [
id: [type: non_null(:string)],
thing: [type: non_null(:input_thing)]
],
resolve: fn
%{id: id, thing: %{value: val}}, _ ->
found = @db |> Map.get(id)
{:ok, %{found | value: val}}
%{id: id, thing: fields}, _ ->
found = @db |> Map.get(id)
{:ok, found |> Map.merge(fields)}
end
# Returns each supported {:error, _} shape, selected by the :type arg.
field :failing_thing, type: :thing do
arg :type, type: :failure_type
resolve fn
%{type: :multiple}, _ ->
{:error, ["one", "two"]}
%{type: :with_code}, _ ->
{:error, message: "Custom Error", code: 42}
%{type: :without_message}, _ ->
{:error, code: 42}
%{type: :multiple_with_code}, _ ->
{:error, [%{message: "Custom Error 1", code: 1}, %{message: "Custom Error 2", code: 2}]}
%{type: :multiple_without_message}, _ ->
{:error, [%{message: "Custom Error 1", code: 1}, %{code: 2}]}
end
end
end
query do
field :version, :string
# Returns a bare atom instead of an {:ok, _}/{:error, _} tuple, to
# exercise bad-resolution error handling.
field :bad_resolution,
type: :thing,
resolve: fn _, _ ->
:not_expected
end
field :number,
type: :string,
args: [
val: [type: non_null(:integer)]
],
resolve: fn
%{val: v}, _ -> {:ok, v |> to_string}
args, _ -> {:error, "got #{inspect(args)}"}
end
field :thing_by_context,
type: :thing,
resolve: fn
_, %{context: %{thing: id}} ->
{:ok, @db |> Map.get(id)}
_, _ ->
{:error, "No :id context provided"}
end
field :things, list_of(:thing) do
resolve fn _, _ ->
{:ok, @db |> Map.values() |> Enum.sort_by(& &1.id)}
end
end
field :thing,
type: :thing,
args: [
id: [
description: "id of the thing",
type: non_null(:string)
],
deprecated_arg: [
description: "This is a deprecated arg",
type: :string,
deprecate: true
],
deprecated_non_null_arg: [
description: "This is a non-null deprecated arg",
type: non_null(:string),
deprecate: true
],
deprecated_arg_with_reason: [
description: "This is a deprecated arg with a reason",
type: :string,
deprecate: "reason"
],
deprecated_non_null_arg_with_reason: [
description: "This is a non-null deprecated arg with a reasor",
type: non_null(:string),
deprecate: "reason"
]
],
resolve: fn %{id: id}, _ ->
{:ok, @db |> Map.get(id)}
end
field :deprecated_thing,
type: :thing,
args: [
id: [
description: "id of the thing",
type: non_null(:string)
]
],
resolve: fn %{id: id}, _ ->
{:ok, @db |> Map.get(id)}
end,
deprecate: true
field :deprecated_thing_with_reason,
type: :thing,
args: [
id: [
description: "id of the thing",
type: non_null(:string)
]
],
deprecate: "use `thing' instead",
resolve: fn %{id: id}, _ ->
{:ok, @db |> Map.get(id)}
end
end
input_object :input_thing do
description "A thing as input"
field :value, :integer
field :deprecated_field, :string, deprecate: true
field :deprecated_field_with_reason, :string, deprecate: "reason"
field :deprecated_non_null_field, non_null(:string), deprecate: true
end
object :thing do
description "A thing"
field :fail, :id do
@desc "the id we want this field to fail on"
arg :id, :id
# The repeated `id` in the first clause requires the parent's id and
# the :id argument to be EQUAL (Elixir repeated-variable match);
# only then does the field error.
resolve fn
%{id: id}, %{id: id}, _ ->
{:error, "fail"}
%{id: id}, _, _ ->
{:ok, id}
end
end
field :id, non_null(:string), description: "The ID of the thing"
field :name, :string, description: "The name of the thing"
field :value, :integer, description: "The value of the thing"
field :other_thing,
type: :thing,
resolve: fn _, %{source: %{id: id}} ->
case id do
"foo" -> {:ok, @db |> Map.get("bar")}
"bar" -> {:ok, @db |> Map.get("foo")}
end
end
end
end
defmodule Absinthe.Fixtures.CustomTypesSchema do
# Fixture for the built-in custom scalars (datetime, naive_datetime,
# date, time, decimal) imported from Absinthe.Type.Custom.
use Absinthe.Schema
import_types Absinthe.Type.Custom
# Canned values returned by the query resolver.
@custom_types %{
datetime: %DateTime{
year: 2017,
month: 1,
day: 27,
hour: 20,
minute: 31,
second: 55,
time_zone: "Etc/UTC",
zone_abbr: "UTC",
utc_offset: 0,
std_offset: 0
},
naive_datetime: ~N[2017-01-27 20:31:55],
date: ~D[2017-01-27],
time: ~T[20:31:55],
decimal: Decimal.new("-3.49")
}
query do
field :custom_types_query, :custom_types_object do
resolve fn _, _ -> {:ok, @custom_types} end
end
end
mutation do
field :custom_types_mutation, :result do
arg :args, :custom_types_input
resolve fn _, _ -> {:ok, %{message: "ok"}} end
end
end
object :custom_types_object do
field :datetime, :datetime
field :naive_datetime, :naive_datetime
field :date, :date
field :time, :time
field :decimal, :decimal
end
object :result do
field :message, :string
end
input_object :custom_types_input do
field :datetime, :datetime
field :naive_datetime, :naive_datetime
field :date, :date
field :time, :time
field :decimal, :decimal
end
end
defmodule Absinthe.Fixtures.PetsSchema do
# Port of the graphql-js "pets" validation test schema: interfaces,
# unions, enums, input objects, argument shapes, and one directive per
# GraphQL directive location. Used as the default schema for validation
# phase tests.
use Absinthe.Schema
interface :being do
field :name, :string do
arg :surname, :boolean
end
end
interface :pet do
field :name, :string do
arg :surname, :boolean
end
end
interface :canine do
field :name, :string do
arg :surname, :boolean
end
end
enum :dog_command do
value :sit, as: 0
value :heel, as: 1
value :down, as: 2
end
object :dog do
is_type_of fn _ -> true end
field :name, :string do
arg :surname, :boolean
end
field :nickname, :string
field :bark_volume, :integer
field :barks, :boolean
field :does_know_command, :boolean do
arg :dog_command, :dog_command
end
field :is_housetrained, :boolean do
arg :at_other_homes, :boolean, default_value: true
end
field :is_at_location, :boolean do
arg :x, :integer
arg :y, :integer
end
interfaces [:being, :pet, :canine]
end
object :cat do
is_type_of fn _ -> true end
field :name, :string do
arg :surname, :boolean
end
field :nickname, :string
field :meows, :boolean
field :meow_volume, :integer
field :fur_color, :fur_color
interfaces [:being, :pet]
end
union :cat_or_dog do
types [:dog, :cat]
end
interface :intelligent do
field :iq, :integer
end
object :human do
is_type_of fn _ -> true end
interfaces [:being, :intelligent]
field :name, :string do
arg :surname, :boolean
end
field :pets, list_of(:pet)
field :relatives, list_of(:human)
field :iq, :integer
end
object :alien do
is_type_of fn _ -> true end
interfaces [:being, :intelligent]
field :iq, :integer
field :name, :string do
arg :surname, :boolean
end
field :num_eyes, :integer
end
union :dog_or_human do
types [:dog, :human]
end
union :human_or_alien do
types [:human, :alien]
end
enum :fur_color do
value :brown, as: 0
value :black, as: 1
value :tan, as: 2
value :spotted, as: 3
end
input_object :complex_input do
field :required_field, non_null(:boolean)
field :int_field, :integer
field :string_field, :string
field :boolean_field, :boolean
field :string_list_field, list_of(:string)
end
# One field per argument shape that argument-validation phases need.
object :complicated_args do
field :int_arg_field, :string do
arg :int_arg, :integer
end
field :non_null_int_arg_field, :string do
arg :non_null_int_arg, non_null(:integer)
end
field :string_arg_field, :string do
arg :string_arg, :string
end
field :boolean_arg_field, :string do
arg :boolean_arg, :boolean
end
field :float_arg_field, :string do
arg :float_arg, :float
end
field :id_arg_field, :string do
arg :id_arg, :id
end
field :string_list_arg_field, :string do
arg :string_list_arg, list_of(:string)
end
field :string_list_of_list_arg_field, :string do
arg :string_list_of_list_arg, list_of(list_of(:string))
end
field :complex_arg_field, :string do
arg :complex_arg, :complex_input
end
field :multiple_reqs, :string do
arg :req1, non_null(:integer)
arg :req2, non_null(:integer)
end
field :multiple_opts, :string do
arg :opt1, :integer, default_value: 0
arg :opt2, :integer, default_value: 0
end
field :multiple_opt_and_req, :string do
arg :req1, non_null(:integer)
arg :req2, non_null(:integer)
arg :opt1, :integer, default_value: 0
arg :opt2, :integer, default_value: 0
end
end
query do
field :human, :human do
arg :id, :id
end
field :alien, :alien
field :dog, :dog
field :cat, :cat
field :pet, :pet
field :cat_or_dog, :cat_or_dog
field :dog_or_human, :dog_or_human
field :human_or_alien, :human_or_alien
field :complicated_args, :complicated_args
end
# One no-op directive per GraphQL directive location, for directive
# placement validation tests.
directive :on_query do
on [:query]
end
directive :on_mutation do
on [:mutation]
end
directive :on_subscription do
on [:subscription]
end
directive :on_field do
on [:field]
end
directive :on_fragment_definition do
on [:fragment_definition]
end
directive :on_fragment_spread do
on [:fragment_spread]
end
directive :on_inline_fragment do
on [:inline_fragment]
end
directive :on_schema do
on [:schema]
end
directive :on_scalar do
on [:scalar]
end
directive :on_object do
on [:object]
end
directive :on_field_definition do
on [:field_definition]
end
directive :on_argument_definition do
on [:argument_definition]
end
directive :on_interface do
on [:interface]
end
directive :on_union do
on [:union]
end
directive :on_enum do
on [:enum]
end
directive :on_enum_value do
on [:enum_value]
end
directive :on_input_object do
on [:input_object]
end
directive :on_input_field_definition do
on [:input_field_definition]
end
end
# Test fixture schema exercising GraphQL argument handling: custom scalar
# parse/serialize, input objects (nested, non-null, defaulted fields), enums,
# and (nested) list arguments. Resolvers echo their inputs back so tests can
# observe exactly what the argument pipeline produced.
defmodule Absinthe.Fixtures.ArgumentsSchema do
use Absinthe.Schema
# Lookup table used by the :something field to map the boolean `flag`
# argument to its string response.
@res %{
true => "YES",
false => "NO"
}
# Scalar that wraps an incoming string value in a %{first_name: ...} map.
scalar :input_name do
parse fn %{value: value} -> {:ok, %{first_name: value}} end
serialize fn %{first_name: name} -> name end
end
scalar :name do
serialize &to_string/1
parse fn
%Absinthe.Blueprint.Input.String{} = string ->
# NOTE(review): returns the bare value rather than an {:ok, value}
# tuple — presumably deliberate for this fixture; confirm against the
# scalar parse contract before reusing this pattern.
string.value
_ ->
:error
end
end
input_object :boolean_input_object do
field :flag, :boolean
end
input_object :contact_input do
field :email, non_null(:string)
field :contact_type, :contact_type
# Exercises default values on input-object fields.
field :default_with_string, :string, default_value: "asdf"
field :nested_contact_input, :nested_contact_input
end
input_object :nested_contact_input do
field :email, non_null(:string)
end
enum :contact_type do
# Custom external name and internal representation for this value.
value :email, name: "Email", as: "Email"
value :phone
value :sms, deprecate: "Use phone instead"
end
input_object :input_stuff do
field :value, :integer
field :non_null_field, non_null(:string)
end
query do
field :stuff, :integer do
arg :stuff, non_null(:input_stuff)
resolve fn _, _ ->
{:ok, 14}
end
end
field :test_boolean_input_object, :boolean do
arg :input, non_null(:boolean_input_object)
resolve fn %{input: input}, _ ->
{:ok, input[:flag]}
end
end
# Echoes the enum argument back (nil when not provided).
field :contact, :contact_type do
arg :type, :contact_type
resolve fn args, _ -> {:ok, Map.get(args, :type)} end
end
field :contacts, list_of(:string) do
arg :contacts, non_null(list_of(:contact_input))
resolve fn %{contacts: contacts}, _ ->
{:ok, Enum.map(contacts, &Map.get(&1, :email))}
end
end
field :names, list_of(:input_name) do
arg :names, list_of(:input_name)
resolve fn %{names: names}, _ -> {:ok, names} end
end
field :list_of_lists, list_of(list_of(:string)) do
arg :items, list_of(list_of(:string))
resolve fn %{items: items}, _ ->
{:ok, items}
end
end
field :numbers, list_of(:integer) do
arg :numbers, list_of(:integer)
resolve fn %{numbers: numbers}, _ ->
{:ok, numbers}
end
end
# Concatenates the contact email with the (possibly defaulted) string field;
# errors with a dump of the args when no contact was given.
field :user, :string do
arg :contact, :contact_input
resolve fn
%{contact: %{email: email} = contact}, _ ->
{:ok, "#{email}#{contact[:default_with_string]}"}
args, _ ->
{:error, "Got #{inspect(args)} instead"}
end
end
# Keyword-list (non-block) field definition style, with a multi-clause
# resolver dispatching on which argument arrived.
field :something,
type: :string,
args: [
name: [type: :input_name],
flag: [type: :boolean, default_value: false]
],
resolve: fn
%{name: %{first_name: name}}, _ ->
{:ok, name}
%{flag: val}, _ ->
{:ok, @res[val]}
_, _ ->
{:error, "No value provided for flag argument"}
end
field :required_thing, :string do
arg :name, non_null(:input_name)
resolve fn
%{name: %{first_name: name}}, _ -> {:ok, name}
args, _ -> {:error, "Got #{inspect(args)} instead"}
end
end
end
end
defmodule Absinthe.Case.Assertions.Schema do
  @moduledoc false
  import ExUnit.Assertions

  # Compiles a dynamic schema fixture from test/support/fixtures/dynamic/.
  def load_schema(name) do
    Code.require_file("test/support/fixtures/dynamic/#{name}.exs")
  end

  @doc """
  Assert a schema error occurs.

  ## Examples

  ```
  iex> assert_schema_error("schema-name", [%{rule: Absinthe.Schema.Rule.TheRuleHere, data: :bar}])
  ```
  """
  def assert_schema_error(schema_name, patterns) do
    err =
      assert_raise Absinthe.Schema.Error, fn ->
        load_schema(schema_name)
      end

    # Every expected pattern must appear among the raised error's details.
    # Enum.each (not Enum.filter): we iterate only for the assertions; the
    # original filtered result was discarded, so filtering was misleading.
    Enum.each(patterns, fn pattern ->
      assert Enum.find(err.details, fn detail ->
               pattern.rule == detail.rule && pattern.data == detail.data
             end),
             "Could not find error detail pattern #{inspect(pattern)} in #{inspect(err.details)}"
    end)

    # ...and there must be no extra, unexpected details.
    assert length(patterns) == length(err.details)
  end

  # Asserts that loading the named schema raises a notation (DSL misuse) error.
  def assert_notation_error(name) do
    assert_raise(Absinthe.Schema.Notation.Error, fn ->
      load_schema(name)
    end)
  end
end
defmodule Absinthe.Case.Assertions.Result do
  @moduledoc false
  import ExUnit.Assertions

  # Compares two `Absinthe.run` results, ignoring error `:locations`.
  # Bodies are compared before the :ok/:error tags for a richer diff.
  def assert_result({expected_tag, expected_body}, {actual_tag, actual_body}) do
    assert clean(expected_body) == clean(actual_body)
    assert expected_tag == actual_tag
  end

  # Asserts the result is `{:ok, %{data: expected}}`.
  def assert_data(expected, result) do
    assert_result({:ok, %{data: expected}}, result)
  end

  # Joins `lines` with newlines and asserts the combined error message.
  def assert_error_message_lines(lines, result) do
    lines
    |> Enum.join("\n")
    |> assert_error_message(result)
  end

  # Asserts that at least one returned error carries exactly `error_message`.
  def assert_error_message(error_message, result) do
    assert {:ok, %{errors: errors}} = result

    assert Enum.any?(errors, fn %{message: message} ->
             error_message == message
           end)
  end

  # Drops the :locations key from every error so position data never
  # participates in equality checks.
  defp clean(%{errors: errors} = result) do
    %{result | errors: Enum.map(errors, &Map.delete(&1, :locations))}
  end

  defp clean(other), do: other
end
defmodule Absinthe.Case.Helpers.Run do
  @moduledoc false

  @doc "Convenience wrapper around `Absinthe.run/3` for importing into test cases."
  def run(document, schema, options \\ []), do: Absinthe.run(document, schema, options)
end
# Shared test-case template: `use Absinthe.Case` pulls in ExUnit plus the
# run/assertion helpers defined in the sibling support modules.
defmodule Absinthe.Case do
# `opts` (e.g. `async: true`) are forwarded verbatim to `use ExUnit.Case`.
defmacro __using__(opts) do
quote do
use ExUnit.Case, unquote(opts)
import Absinthe.Case.Helpers.Run
import Absinthe.Case.Assertions.Result
import Absinthe.Case.Assertions.Schema
end
end
end
defmodule Absinthe.IntegrationCase.Definition do
  @moduledoc false
  # Describes one integration test: a named GraphQL document, the schema to
  # run it against, and a list of {options, expectation} scenarios.

  @enforce_keys [:name, :schema, :graphql, :scenarios]
  defstruct [
    :name,
    :schema,
    :graphql,
    scenarios: []
  ]

  @type expect_exception :: {:raise, module}
  @type expectation :: Absinthe.run_result() | expect_exception | :custom_assertion
  @type scenario :: {Absinthe.run_opts(), expectation}

  @type t :: %__MODULE__{
          name: String.t(),
          schema: Absinthe.Schema.t(),
          graphql: String.t(),
          scenarios: [scenario]
        }

  # Builds a definition, honoring a leading `# schema: Name` override comment
  # in the document and normalizing scenarios to {options, expectation} pairs.
  def create(name, graphql, default_schema, scenarios) do
    %__MODULE__{
      name: name,
      graphql: graphql,
      schema: normalize_schema(default_schema, graphql),
      scenarios: normalize_scenarios(scenarios)
    }
  end

  # A document starting with "# schema: SomeName" selects
  # Absinthe.Fixtures.SomeName; otherwise the provided default is used.
  defp normalize_schema(default_schema, graphql) do
    case Regex.run(~r/^#\s*schema:\s*(\S+)/i, graphql) do
      [_, fixture_name] -> Module.concat(Absinthe.Fixtures, String.to_atom(fixture_name))
      nil -> default_schema
    end
  end

  # Accepts a single scenario or a list of them.
  defp normalize_scenarios(scenarios) do
    scenarios
    |> List.wrap()
    |> Enum.map(&normalize_scenario/1)
  end

  # Already an {options, {tag, value}} pair: pass through unchanged.
  defp normalize_scenario({_options, {_, _}} = scenario), do: scenario
  # Bare expectation: attach empty run options.
  defp normalize_scenario(expectation), do: {[], expectation}
end
# Test bootstrap: skip tests tagged :pending/:pending_schema by default and
# allow each test up to 30 seconds before timing out.
ExUnit.configure(exclude: [pending: true, pending_schema: true], timeout: 30_000)
ExUnit.start()
# Tests for the GraphQL lexer. Tokens are either {type, {line, col}} for
# punctuation or {type, {line, col}, charlist_value} for valued tokens.
defmodule Absinthe.LexerTest do
use ExUnit.Case, async: true
@query """
{ foo }
"""
test "basic document" do
assert {:ok, [{:"{", {1, 1}}, {:name, {1, 3}, 'foo'}, {:"}", {1, 7}}]} =
Absinthe.Lexer.tokenize(@query)
end
@query """
{ nullName }
"""
# Guards against the lexer greedily matching the `null` keyword prefix.
test "document with a name that starts with a keyword" do
assert {:ok, [{:"{", {1, 1}}, {:name, {1, 3}, 'nullName'}, {:"}", {1, 12}}]} =
Absinthe.Lexer.tokenize(@query)
end
@query ~S"""
{
  foo
}
"""
test "basic document, multiple lines" do
assert {:ok, [{:"{", {1, 1}}, {:name, {2, 3}, 'foo'}, {:"}", {3, 1}}]} =
Absinthe.Lexer.tokenize(@query)
end
@query """
{
  foo(bar: \"""
  stuff
  \""")
}
"""
# Block strings are tokenized raw, including the triple-quote delimiters
# and interior newlines/indentation; trimming happens in a later phase.
test "basic document, multiple lines with block string" do
assert {:ok,
[
{:"{", {1, 1}},
{:name, {2, 3}, 'foo'},
{:"(", {2, 6}},
{:name, {2, 7}, 'bar'},
{:":", {2, 10}},
{:block_string_value, {2, 12}, '"""\n  stuff\n  """'},
{:")", {4, 6}},
{:"}", {5, 1}}
]} = Absinthe.Lexer.tokenize(@query)
end
end
# Tests for the Async middleware's `async/1` resolution helper.
defmodule Absinthe.Middleware.AsyncTest do
use Absinthe.Case, async: true
defmodule Schema do
use Absinthe.Schema
query do
# Nested async calls: the inner async's result must still surface.
field :async_thing, :string do
resolve fn _, _, _ ->
async(fn ->
async(fn ->
{:ok, "we async now"}
end)
end)
end
end
field :other_async_thing, :string do
resolve cool_async(fn _, _, _ ->
{:ok, "magic"}
end)
end
field :returns_nil, :string do
resolve cool_async(fn _, _, _ ->
{:ok, nil}
end)
end
end
# Wraps a resolver so it runs async and then re-enters the Resolution
# middleware via the {:middleware, ...} tuple.
def cool_async(fun) do
fn _source, _args, _info ->
async(fn ->
{:middleware, Absinthe.Resolution, fun}
end)
end
end
end
test "can resolve a field using the normal async helper" do
doc = """
{asyncThing}
"""
assert {:ok, %{data: %{"asyncThing" => "we async now"}}} == Absinthe.run(doc, Schema)
end
test "can resolve a field using a cooler but probably confusing to some people helper" do
doc = """
{otherAsyncThing}
"""
assert {:ok, %{data: %{"otherAsyncThing" => "magic"}}} == Absinthe.run(doc, Schema)
end
# nil from an async resolver must not be confused with a missing result.
test "can return nil from an async field safely" do
doc = """
{returnsNil}
"""
assert {:ok, %{data: %{"returnsNil" => nil}}} == Absinthe.run(doc, Schema)
end
end
# Tests for the Dataloader middleware: batching, cross-query batching, and
# cache reuse. The KV source sends `:loading` to the test process each time a
# batch actually executes, so tests can count batch runs with
# assert_receive/refute_receive.
defmodule Absinthe.Middleware.DataloaderTest do
use Absinthe.Case, async: true
defmodule Schema do
use Absinthe.Schema
import Absinthe.Resolution.Helpers
@organizations 1..3
|> Map.new(
&{&1,
%{
id: &1,
name: "Organization: ##{&1}"
}}
)
@users 1..3
|> Enum.map(
&%{
id: &1,
name: "User: ##{&1}",
organization_id: &1
}
)
def organizations(), do: @organizations
# Batch loader: notifies the test process, then resolves each source map
# to its organization via :organization_id.
defp batch_load({:organization_id, %{pid: test_pid}}, sources) do
send(test_pid, :loading)
Map.new(sources, fn src ->
{src, Map.fetch!(@organizations, src.organization_id)}
end)
end
def dataloader() do
source = Dataloader.KV.new(&batch_load/2)
Dataloader.add_source(Dataloader.new(), :test, source)
end
# Context callback: install a loader (unless one was provided) and record
# the test pid so resolvers/batches can message it.
def context(ctx) do
ctx
|> Map.put_new(:loader, dataloader())
|> Map.merge(%{
test_pid: self()
})
end
def plugins do
[Absinthe.Middleware.Dataloader] ++ Absinthe.Plugin.defaults()
end
object :organization do
field :id, :integer
field :name, :string
end
object :user do
field :name, :string
# Uses the MFA-style dataloader helper with an explicit batch-key fn.
field :foo_organization, :organization do
resolve dataloader(:test, fn _, _, %{context: %{test_pid: pid}} ->
{:organization_id, %{pid: pid}}
end)
end
# Uses the simpler key + args form of the dataloader helper.
field :bar_organization, :organization do
resolve dataloader(:test, :organization_id, args: %{pid: self()})
end
end
query do
field :users, list_of(:user) do
resolve fn _, _, _ -> {:ok, @users} end
end
# Hand-rolled load/on_load usage, plus a Dataloader.put to pre-warm an
# unrelated cache entry.
field :organization, :organization do
arg :id, non_null(:integer)
resolve fn _, %{id: id}, %{context: %{loader: loader, test_pid: test_pid}} ->
loader
|> Dataloader.load(:test, {:organization_id, %{pid: test_pid}}, %{organization_id: id})
|> Dataloader.put(
:test,
{:organization_id, %{pid: self()}},
%{organization_id: 123},
%{}
)
|> on_load(fn loader ->
{:ok,
Dataloader.get(loader, :test, {:organization_id, %{pid: test_pid}}, %{
organization_id: id
})}
end)
end
end
end
end
test "can resolve a field using the normal dataloader helper" do
doc = """
{
  users {
    organization: barOrganization {
      name
    }
  }
}
"""
expected_data = %{
"users" => [
%{"organization" => %{"name" => "Organization: #1"}},
%{"organization" => %{"name" => "Organization: #2"}},
%{"organization" => %{"name" => "Organization: #3"}}
]
}
assert {:ok, %{data: data}} = Absinthe.run(doc, Schema)
assert expected_data == data
# Exactly one batch execution for all three users.
assert_receive(:loading)
refute_receive(:loading)
end
test "can resolve batched fields cross-query that have different data requirements" do
doc = """
{
  users {
    organization: fooOrganization {
      name
    }
  }
  organization(id: 1) {
    id
  }
}
"""
expected_data = %{
"users" => [
%{"organization" => %{"name" => "Organization: #1"}},
%{"organization" => %{"name" => "Organization: #2"}},
%{"organization" => %{"name" => "Organization: #3"}}
],
"organization" => %{"id" => 1}
}
assert {:ok, %{data: data}} = Absinthe.run(doc, Schema)
assert expected_data == data
# Both top-level fields share a single batch run.
assert_receive(:loading)
refute_receive(:loading)
end
test "using a cached field doesn't explode" do
doc = """
{
  organization(id: 1) {
    id
  }
}
"""
expected_data = %{"organization" => %{"id" => 1}}
org = Schema.organizations()[1]
# Get the dataloader, and warm the cache for the organization key we're going
# to try to access via graphql.
dataloader =
Schema.dataloader()
|> Dataloader.put(:test, {:organization_id, %{pid: self()}}, %{organization_id: 1}, org)
context = %{
loader: dataloader
}
assert {:ok, %{data: data}} = Absinthe.run(doc, Schema, context: context)
assert expected_data == data
# A warm cache must not trigger a batch execution.
refute_receive(:loading)
end
end
# Tests for the Batch middleware's `batch/3` helper: organization lookups for
# several users are collapsed into a single `by_id/2` batch call.
defmodule Absinthe.Middleware.BatchTest do
use Absinthe.Case, async: true
defmodule Schema do
use Absinthe.Schema
@organizations 1..3
|> Map.new(
&{&1,
%{
id: &1,
name: "Organization: ##{&1}"
}}
)
@users 1..3
|> Enum.map(
&%{
id: &1,
name: "User: ##{&1}",
organization_id: &1
}
)
object :organization do
field :id, :integer
field :name, :string
end
object :user do
field :name, :string
# Defers to {__MODULE__, :by_id}; the post-batch fn picks this user's
# organization out of the aggregated result map.
field :organization, :organization do
resolve fn user, _, _ ->
batch({__MODULE__, :by_id}, user.organization_id, fn batch ->
{:ok, Map.get(batch, user.organization_id)}
end)
end
end
end
query do
field :users, list_of(:user) do
resolve fn _, _, _ -> {:ok, @users} end
end
field :organization, :organization do
arg :id, non_null(:integer)
resolve fn _, %{id: id}, _ ->
batch({__MODULE__, :by_id}, id, fn batch ->
{:ok, Map.get(batch, id)}
end)
end
end
end
# Batch function: receives all collected ids, returns id => organization.
def by_id(_, ids) do
Map.take(@organizations, ids)
end
end
test "can resolve a field using the normal async helper" do
doc = """
{
  users {
    organization {
      name
    }
  }
}
"""
expected_data = %{
"users" => [
%{"organization" => %{"name" => "Organization: #1"}},
%{"organization" => %{"name" => "Organization: #2"}},
%{"organization" => %{"name" => "Organization: #3"}}
]
}
assert {:ok, %{data: data}} = Absinthe.run(doc, Schema)
assert expected_data == data
end
test "can resolve batched fields cross-query that have different data requirements" do
doc = """
{
  users {
    organization {
      name
    }
  }
  organization(id: 1) {
    id
  }
}
"""
expected_data = %{
"users" => [
%{"organization" => %{"name" => "Organization: #1"}},
%{"organization" => %{"name" => "Organization: #2"}},
%{"organization" => %{"name" => "Organization: #3"}}
],
"organization" => %{"id" => 1}
}
assert {:ok, %{data: data}} = Absinthe.run(doc, Schema)
assert expected_data == data
end
end
# Tests that fields selected by multiple fragments on the same object are
# deep-merged (not overwritten), including duplicates and fragments spread at
# different nesting levels.
defmodule Absinthe.FragmentMergeTest do
use Absinthe.Case, async: true
defmodule Schema do
use Absinthe.Schema
object :user do
field :todos, list_of(:todo)
end
object :todo do
field :total_count, :integer
field :completed_count, :integer
end
query do
field :viewer, :user do
resolve fn _, _ ->
{:ok,
%{
todos: [%{total_count: 1, completed_count: 2}, %{total_count: 3, completed_count: 4}]
}}
end
end
end
end
# Two fragments each select a different field of `todos`; both must appear.
test "it deep merges fields properly" do
doc = """
{
  viewer {
    ...fragmentWithOneField
    ...fragmentWithOtherField
  }
}
fragment fragmentWithOneField on User {
  todos {
    totalCount,
  }
}
fragment fragmentWithOtherField on User {
  todos {
    completedCount
  }
}
"""
expected = %{
"viewer" => %{
"todos" => [
%{"totalCount" => 1, "completedCount" => 2},
%{"totalCount" => 3, "completedCount" => 4}
]
}
}
assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema)
end
# completedCount is selected by both fragments; the duplicate must collapse.
test "it deep merges duplicated fields properly" do
doc = """
{
  viewer {
    ...fragmentWithOtherField
    ...fragmentWithOneField
  }
}
fragment fragmentWithOneField on User {
  todos {
    totalCount,
    completedCount
  }
}
fragment fragmentWithOtherField on User {
  todos {
    completedCount
  }
}
"""
expected = %{
"viewer" => %{
"todos" => [
%{"totalCount" => 1, "completedCount" => 2},
%{"totalCount" => 3, "completedCount" => 4}
]
}
}
assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema)
end
# One fragment spreads inside `viewer`, the other at the query root.
test "it deep merges fields properly different levels" do
doc = """
{
  viewer {
    ...fragmentWithOneField
  }
  ...fragmentWithOtherField
}
fragment fragmentWithOneField on User {
  todos {
    totalCount,
  }
}
fragment fragmentWithOtherField on RootQueryType {
  viewer {
    todos {
      completedCount
    }
  }
}
"""
expected = %{
"viewer" => %{
"todos" => [
%{"totalCount" => 1, "completedCount" => 2},
%{"totalCount" => 3, "completedCount" => 4}
]
}
}
assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema)
end
end
# Tests fragment resolution over unions and interfaces. Concrete types are
# discriminated by the `:type` key on each value via `resolve_type`.
defmodule Absinthe.UnionFragmentTest do
use Absinthe.Case, async: true
defmodule Schema do
use Absinthe.Schema
object :user do
# Resolves from :username — NOT the raw :name key on the value.
field :name, :string do
resolve fn user, _, _ -> {:ok, user.username} end
end
field :todos, list_of(:todo)
interface :named
end
object :todo do
# Resolves from :title.
field :name, :string do
resolve fn todo, _, _ -> {:ok, todo.title} end
end
field :completed, :boolean
interface :named
interface :completable
end
union :object do
types [:user, :todo]
resolve_type fn %{type: type}, _ -> type end
end
interface :named do
field :name, :string
resolve_type fn %{type: type}, _ -> type end
end
interface :completable do
field :completed, :boolean
resolve_type fn %{type: type}, _ -> type end
end
object :viewer do
field :objects, list_of(:object)
field :me, :user
field :named_thing, :named
end
query do
field :viewer, :viewer do
resolve fn _, _ ->
{:ok,
%{
objects: [
%{type: :user, username: "foo", completed: true},
%{type: :todo, title: "do stuff", completed: false},
%{type: :user, username: "bar"}
],
me: %{type: :user, username: "baz", todos: [], name: "should not be exposed"},
named_thing: %{type: :todo, title: "do stuff", completed: false}
}}
end
end
end
end
test "it queries a heterogeneous list properly" do
doc = """
{
  viewer {
    objects {
      ... on User {
        __typename
        name
      }
      ... on Todo {
        __typename
        completed
      }
    }
  }
}
"""
expected = %{
"viewer" => %{
"objects" => [
%{"__typename" => "User", "name" => "foo"},
%{"__typename" => "Todo", "completed" => false},
%{"__typename" => "User", "name" => "bar"}
]
}
}
assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema)
end
# "baz" (from the User resolver) must win over the raw :name key.
test "it queries an interface with the concrete type's field resolvers" do
doc = """
{
  viewer {
    me {
      ... on Named {
        __typename
        name
      }
    }
  }
}
"""
expected = %{"viewer" => %{"me" => %{"__typename" => "User", "name" => "baz"}}}
assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema)
end
test "it queries an interface implemented by a union type" do
doc = """
{
  viewer {
    objects {
      ... on Named {
        __typename
        name
      }
    }
  }
}
"""
expected = %{
"viewer" => %{
"objects" => [
%{"__typename" => "User", "name" => "foo"},
%{"__typename" => "Todo", "name" => "do stuff"},
%{"__typename" => "User", "name" => "bar"}
]
}
}
assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema)
end
# Spreads the Completable interface on a field typed as the Named interface.
test "it queries an interface on an unrelated interface" do
doc = """
{
  viewer {
    namedThing {
      __typename
      name
      ... on Completable {
        completed
      }
    }
  }
}
"""
expected = %{
"viewer" => %{
"namedThing" => %{"__typename" => "Todo", "name" => "do stuff", "completed" => false}
}
}
assert {:ok, %{data: expected}} == Absinthe.run(doc, Schema)
end
end
# Regression test for the Schema phase: mismatched literal shapes (a list
# where an Int is expected) must not crash the phase.
defmodule Absinthe.Phase.SchemaTest do
use Absinthe.Case, async: true
defmodule IntegerInputSchema do
use Absinthe.Schema
query do
field :test, :string do
arg :integer, :integer
resolve fn _, _, _ ->
{:ok, "ayup"}
end
end
end
end
describe "when given [Int] for Int schema node" do
@query """
{ test(integer: [1]) }
"""
test "doesn't raise an exception" do
assert {:ok, _} = run(@query)
end
end
# Runs the document pipeline up to (but excluding) the Schema phase, then
# invokes the Schema phase directly so its behavior is isolated.
def run(query) do
pipeline =
IntegerInputSchema
|> Absinthe.Pipeline.for_document([])
|> Absinthe.Pipeline.before(Absinthe.Phase.Schema)
with {:ok, bp, _} <- Absinthe.Pipeline.run(query, pipeline) do
Absinthe.Phase.Schema.run(bp, schema: IntegerInputSchema)
end
end
end
# Tests that the parser attaches SDL descriptions (block-string and
# single-line) to type, field, argument, and enum-value definitions.
defmodule Absinthe.Phase.Parse.DescriptionsTest do
use Absinthe.Case, async: true
@moduletag :parser
@moduletag :sdl
@sdl """
\"""
A simple GraphQL schema which is well described.
\"""
type Query {
  \"""
  Translates a string from a given language into a different language.
  \"""
  translate(
    "The original language that `text` is provided in."
    fromLanguage: Language
    "The translated language to be returned."
    toLanguage: Language
    "The text to be translated."
    text: String
  ): String
}
\"""
The set of languages supported by `translate`.
\"""
enum Language {
  "English"
  EN
  "French"
  FR
  "Chinese"
  CH
}
"""
test "parses descriptions" do
assert {:ok,
%{
definitions: [
%Absinthe.Language.ObjectTypeDefinition{
description: "A simple GraphQL schema which is well described.",
fields: [
%Absinthe.Language.FieldDefinition{
arguments: [
%Absinthe.Language.InputValueDefinition{
description: "The original language that `text` is provided in."
},
%Absinthe.Language.InputValueDefinition{
description: "The translated language to be returned."
},
%Absinthe.Language.InputValueDefinition{
description: "The text to be translated."
}
],
description:
"Translates a string from a given language into a different language."
}
]
},
%Absinthe.Language.EnumTypeDefinition{
description: "The set of languages supported by `translate`.",
values: [
%Absinthe.Language.EnumValueDefinition{
description: "English"
},
%Absinthe.Language.EnumValueDefinition{
description: "French"
},
%Absinthe.Language.EnumValueDefinition{
description: "Chinese"
}
]
}
]
}} = run(@sdl)
end
# Runs only the Parse phase and unwraps the parsed document.
def run(input) do
with {:ok, %{input: input}} <- Absinthe.Phase.Parse.run(input) do
{:ok, input}
end
end
end
# Tests block-string ("""...""") parsing semantics: escape handling, uniform
# indentation stripping, leading/trailing blank-line removal, trailing-space
# preservation, and CRLF normalization. Each test parses a document and digs
# the `body` argument's parsed value out of the AST with extract_body/1.
defmodule Absinthe.Phase.Parse.BlockStringsTest do
use Absinthe.Case, async: true
@moduletag :parser
test "parses a query with a block string literal and no newlines" do
assert {:ok, result} = run(~S<{ post(title: "single", body: """text""") { name } }>)
assert "text" == extract_body(result)
end
test "parses a query with a block string argument that contains a quote" do
assert {:ok, result} = run(~S<{ post(title: "single", body: """text "here""") { name } }>)
assert "text \"here" == extract_body(result)
end
# Escape sequences inside block strings are NOT interpreted.
test "parses a query with a block string literal that contains various escapes" do
assert {:ok, result} =
run(
~s<{ post(title: "single", body: """unescaped \\n\\r\\b\\t\\f\\u1234""") { name } }>
)
assert "unescaped \\n\\r\\b\\t\\f\\u1234" == extract_body(result)
end
test "parses a query with a block string literal that contains various slashes" do
assert {:ok, result} =
run(~s<{ post(title: "single", body: """slashes \\\\ \\/""") { name } }>)
assert "slashes \\\\ \\/" == extract_body(result)
end
# Ordinary (quoted) strings DO interpret backslash escapes.
test "parses attributes when there are escapes" do
assert {:ok, result} = run(
~s<{ post(title: "title", body: "body\\\\") { name } }>
)
assert "body\\" == extract_body(result)
assert {:ok, result} = run(
~s<{ post(title: "title\\\\", body: "body") { name } }>
)
assert "body" == extract_body(result)
end
test "parse attributes where there are escapes on multiple lines" do
assert {:ok, result} = run(
~s<{ post(
  title: "title",
  body: "body\\\\"
) { name } }>
)
assert "body\\" == extract_body(result)
assert {:ok, result} = run(
~s<{ post(
  title: "title\\\\",
  body: "body"
) { name } }>
)
assert "body" == extract_body(result)
end
@input [
"",
"    Hello,",
"      World!",
"",
"    Yours,",
"      GraphQL."
]
@result [
"Hello,",
"  World!",
"",
"Yours,",
"  GraphQL."
]
test "parses a query with a block string literal, removing uniform indentation from a string" do
assert {:ok, result} =
run(~s<{ post(title: "single", body: """#{lines(@input)}""") { name } }>)
assert lines(@result) == extract_body(result)
end
@input [
"",
"",
"    Hello,",
"      World!",
"",
"    Yours,",
"      GraphQL.",
"",
""
]
@result [
"Hello,",
"  World!",
"",
"Yours,",
"  GraphQL."
]
test "parses a query with a block string literal, removing empty leading and trailing lines" do
assert {:ok, result} =
run(~s<{ post(title: "single", body: """#{lines(@input)}""") { name } }>)
assert lines(@result) == extract_body(result)
end
@input [
"  ",
"        ",
"    Hello,",
"      World!",
"",
"    Yours,",
"      GraphQL.",
"        ",
"  "
]
@result [
"Hello,",
"  World!",
"",
"Yours,",
"  GraphQL."
]
test "parses a query with a block string literal, removing blank leading and trailing lines" do
assert {:ok, result} =
run(~s<{ post(title: "single", body: """#{lines(@input)}""") { name } }>)
assert lines(@result) == extract_body(result)
end
@input [
"    Hello,",
"      World!",
"",
"    Yours,",
"      GraphQL."
]
@result [
"    Hello,",
"  World!",
"",
"Yours,",
"  GraphQL."
]
test "parses a query with a block string literal, retaining indentation from first line" do
assert {:ok, result} =
run(~s<{ post(title: "single", body: """#{lines(@input)}""") { name } }>)
assert lines(@result) == extract_body(result)
end
@input [
"               ",
"    Hello,     ",
"      World!   ",
"               ",
"    Yours,     ",
"      GraphQL. ",
"               "
]
@result [
"Hello,     ",
"  World!   ",
"           ",
"Yours,     ",
"  GraphQL. "
]
test "parses a query with a block string literal, not altering trailing spaces" do
assert {:ok, result} =
run(~s<{ post(title: "single", body: """#{lines(@input)}""") { name } }>)
assert lines(@result) == extract_body(result)
end
test "parses a query with a block string literal and carriage returns, normalizing" do
assert {:ok, result} =
run(~s<{ post(title: "single", body: """text\nline\r\nanother""") { name } }>)
assert "text\nline\nanother" == extract_body(result)
end
test "parses a query with a block string literal with escaped triple quotes and no newlines" do
assert {:ok, result} = run(~S<{ post(title: "single", body: """text\""" """) { name } }>)
assert ~S<text""" > == extract_body(result)
end
# A NUL byte is not a valid source character; parsing must fail cleanly.
test "returns an error for a bad byte" do
assert {:error, err} =
run(
~s<{ post(title: "single", body: """trying to escape a \u0000 byte""") { name } }>
)
assert "Parsing failed at" <> _ = extract_error_message(err)
end
test "parses a query with a block string literal as a variable default" do
assert {:ok, result} =
run(
~S<query ($body: String = """text""") { post(title: "single", body: $body) { name } }>
)
assert "text" ==
get_in(result, [
Access.key(:definitions, []),
Access.at(0),
Access.key(:variable_definitions, %{}),
Access.at(0),
Access.key(:default_value, %{}),
Access.key(:value, nil)
])
end
# Pulls the first validation error message out of an error blueprint.
defp extract_error_message(err) do
get_in(err, [
Access.key(:execution, %{}),
Access.key(:validation_errors, []),
Access.at(0),
Access.key(:message, nil)
])
end
# Digs the parsed value of the second argument (body) of the first field of
# the first definition out of the document AST.
defp extract_body(value) do
get_in(value, [
Access.key(:definitions),
Access.at(0),
Access.key(:selection_set),
Access.key(:selections),
Access.at(0),
Access.key(:arguments),
Access.at(1),
Access.key(:value),
Access.key(:value)
])
end
# Runs only the Parse phase and unwraps the parsed document.
def run(input) do
with {:ok, %{input: input}} <- Absinthe.Phase.Parse.run(input) do
{:ok, input}
end
end
# Joins fixture lines into a single newline-separated block-string body.
defp lines(input) do
input
|> Enum.join("\n")
end
end
# Tests for the Parse phase: operation forms, reserved-word identifiers,
# UTF-8, null literals, lexer-error reporting, and string-escape handling.
defmodule Absinthe.Phase.ParseTest do
use Absinthe.Case, async: true
@moduletag :parser
test "parses a simple query" do
assert {:ok, _} = run("{ user(id: 2) { name } }")
end
test "fails gracefully" do
assert {:error, _} = run("{ user(id: 2 { name } }")
end
@graphql """
query {
  item(this-won't-lex)
}
"""
# With jump_phases: false the phase returns the error blueprint instead of
# aborting to the result phase.
test "should wrap all lexer errors and return if not aborting to a phase" do
assert {:error, bp} = Absinthe.Phase.Parse.run(@graphql, jump_phases: false)
assert [
%Absinthe.Phase.Error{
extra: %{},
locations: [%{column: 12, line: 2}],
message: "Parsing failed at `-won't-lex`",
phase: Absinthe.Phase.Parse
}
] == bp.execution.validation_errors
end
@reserved ~w(query mutation subscription fragment on implements interface union scalar enum input extend)
test "can parse queries with arguments and variables that are 'reserved words'" do
@reserved
|> Enum.each(fn name ->
assert {:ok, _} =
run("""
mutation CreateThing($#{name}: Int!) {
  createThing(#{name}: $#{name}) { clientThingId }
}
""")
end)
end
@query """
mutation {
  likeStory(storyID: 12345) {
    story {
      likeCount
    }
  }
}
subscription {
  viewer { likes }
}
"""
test "can parse mutations and subscriptions without names" do
assert {:ok, _} = run(@query)
end
@query """
mutation {
  createUser(name: "Владимир") {
    id
  }
}
"""
test "can parse UTF-8" do
assert {:ok, _} = run(@query)
end
@query """
query Something($enum: String!) {
  doSomething(directive: "thing") {
    id
  }
  doSomething(directive: "thing") @schema(object: $enum) {
    id
  }
}
"""
# Keywords like `enum`/`schema` are valid as names in these positions.
test "can parse identifiers in different contexts" do
assert {:ok, _} = run(@query)
end
@query """
query Something($on: String!) {
  on(on: "thing") {
    id
  }
  doSomething(on: "thing") @on(on: $on) {
    id
  }
}
"""
test "can parse 'on' in different contexts" do
assert {:ok, _} = run(@query)
end
@query """
query QueryWithNullLiterals($name: String = null) {
  fieldWithNullLiteral(name: $name, literalNull: null) @direct(arg: null)
}
"""
test "parses null value" do
assert {:ok, _} = run(@query)
end
@query ~S"""
mutation {
  item(data: "{\"foo\": \"bar\"}") {
    id
    data
  }
}
"""
test "can parse escaped strings as inputs" do
assert {:ok, res} = run(@query)
path = [
Access.key!(:definitions),
Access.at(0),
Access.key!(:selection_set),
Access.key!(:selections),
Access.at(0),
Access.key!(:arguments),
Access.at(0),
Access.key!(:value),
Access.key!(:value)
]
assert ~s({"foo": "bar"}) == get_in(res, path)
end
@query ~S"""
mutation {
  item(data: "foo\nbar") {
    id
    data
  }
}
"""
test "can parse escaped characters in inputs" do
assert {:ok, res} = run(@query)
path = [
Access.key!(:definitions),
Access.at(0),
Access.key!(:selection_set),
Access.key!(:selections),
Access.at(0),
Access.key!(:arguments),
Access.at(0),
Access.key!(:value),
Access.key!(:value)
]
assert ~s(foo\nbar) == get_in(res, path)
end
@query ~S"""
mutation {
  item(data: "\" \\ \/ \b \f \n \r \t \u00F3 \u00f3 \u04F9") {
    id
    data
  }
}
"""
# Covers every escape form in the GraphQL spec, including \uXXXX in both
# upper- and lower-case hex.
test "can parse all types of characters escaped according to GraphQL spec as inputs" do
assert {:ok, res} = run(@query)
path = [
Access.key!(:definitions),
Access.at(0),
Access.key!(:selection_set),
Access.key!(:selections),
Access.at(0),
Access.key!(:arguments),
Access.at(0),
Access.key!(:value),
Access.key!(:value)
]
assert ~s(\" \\ \/ \b \f \n \r \t ó ó ӹ) == get_in(res, path)
end
# Runs only the Parse phase and unwraps the parsed document.
def run(input) do
with {:ok, %{input: input}} <- Absinthe.Phase.Parse.run(input) do
{:ok, input}
end
end
end
# Tests that the Document.Schema phase annotates every blueprint node
# (operations, fragments, fields, arguments, directives) with its
# corresponding schema node. Helper finders at the bottom walk the blueprint.
defmodule Absinthe.Phase.Document.SchemaTest do
use Absinthe.Case, async: true
alias Absinthe.{Blueprint, Phase, Pipeline, Type}
defmodule Schema do
use Absinthe.Schema
query do
field :books, list_of(:book)
end
mutation do
field :change_name, :book do
arg :id, non_null(:id)
arg :name, non_null(:string)
end
field :add_review, :review do
arg :info, non_null(:input_review)
end
end
object :book do
field :id, :id
field :name, :string
field :categories, list_of(:category)
field :reviews, list_of(:review)
end
subscription do
field :new_book, :book
end
object :category do
field(:name, :string)
end
object :review do
field :stars, :integer
field :text, :string
end
input_object :input_review do
field :stars, non_null(:integer)
field :text, :string
end
end
# The document pipeline, halted just before the Schema phase, so tests can
# invoke the phase directly on a parsed blueprint.
@pre_pipeline Pipeline.for_document(Schema) |> Pipeline.before(Phase.Schema)
@nameless_query """
{ books { name } }
"""
@query """
query Q($cats: Boolean!) {
  books {
    name
    categories @include(if: $cats) {
      ... CategoryName
    }
  }
}
query BooksOnly {
  books { ... BookName }
}
mutation ModifyBook($id: ID!, $name: String!) {
  changeName(id: $id, name: $name) {
    id
    name
  }
  addReview(id: $id, info: {stars: 4})
}
subscription NewBooks {
  newBook {
    ... on Book {
      id
    }
  }
}
fragment BookName on Book {
  name
}
fragment CategoryName on Category {
  name
}
"""
describe ".run" do
test "sets the root schema field" do
{:ok, result} = input(@query)
assert result.schema == Schema
end
test "sets the query operation schema node" do
{:ok, result} = input(@query)
~w(Q BooksOnly)
|> Enum.each(fn name ->
node = op(result, name)
assert %Type.Object{identifier: :query} = node.schema_node
end)
end
test "sets the non-named query operation schema node" do
{:ok, result} = input(@nameless_query)
node = op(result, nil)
assert %Type.Object{identifier: :query} = node.schema_node
end
test "sets the mutation schema node" do
{:ok, result} = input(@query)
node = op(result, "ModifyBook")
assert %Type.Object{identifier: :mutation} = node.schema_node
end
test "sets the subscription schema node" do
{:ok, result} = input(@query)
node = op(result, "NewBooks")
assert %Type.Object{identifier: :subscription} = node.schema_node
end
test "sets the named fragment schema node" do
{:ok, result} = input(@query)
node = frag(result, "BookName")
assert %Type.Object{identifier: :book} = node.schema_node
end
test "sets the schema node for a named fragment field" do
{:ok, result} = input(@query)
fragment = frag(result, "BookName")
node = field(fragment, "name")
assert %Type.Field{identifier: :name} = node.schema_node
end
test "sets the inline fragment schema node" do
{:ok, result} = input(@query)
node = first_inline_frag(result)
assert %Type.Object{identifier: :book} = node.schema_node
end
test "sets the schema node for an inline fragment" do
{:ok, result} = input(@query)
fragment = first_inline_frag(result)
node = field(fragment, "id")
assert %Type.Field{identifier: :id} = node.schema_node
end
test "sets an operation field schema node" do
{:ok, result} = input(@query)
operation = op(result, "BooksOnly")
node = field(operation, "books")
assert %Type.Field{identifier: :books} = node.schema_node
end
test "sets an field schema node inside another field" do
{:ok, result} = input(@query)
operation = op(result, "Q")
books = field(operation, "books")
node = field(books, "name")
assert %Type.Field{identifier: :name} = node.schema_node
end
# camelCase "changeName" must map to the :change_name schema identifier.
test "sets an operation field schema node supporting an adapter" do
{:ok, result} = input(@query)
node = named(result, Blueprint.Document.Field, "changeName")
assert %Type.Field{identifier: :change_name} = node.schema_node
end
test "sets directive schema nodes" do
{:ok, result} = input(@query)
directive = named(result, Blueprint.Directive, "include")
assert %Type.Directive{name: "include"} = directive.schema_node
end
test "sets field argument schema nodes" do
{:ok, result} = input(@query)
operation = op(result, "ModifyBook")
f = field(operation, "changeName")
node = named(f, Blueprint.Input.Argument, "id")
assert %Type.Argument{identifier: :id} = node.schema_node
end
# Input-object literals: nested fields get schema nodes too, including the
# non-null wrapper around :integer for `stars`.
test "sets field argument schema nodes supporting input objects" do
{:ok, result} = input(@query)
operation = op(result, "ModifyBook")
f = field(operation, "addReview")
top_node = named(f, Blueprint.Input.Argument, "info")
assert %Type.Argument{identifier: :info} = top_node.schema_node
node = top_node.input_value.normalized.fields |> List.first()
assert %Type.Field{identifier: :stars} = node.schema_node
assert %Type.NonNull{of_type: %Type.Scalar{identifier: :integer}} =
node.input_value.schema_node
end
test "sets directive argument schema nodes" do
{:ok, result} = input(@query)
directive = named(result, Blueprint.Directive, "include")
node = named(directive, Blueprint.Input.Argument, "if")
assert %Type.Argument{identifier: :if} = node.schema_node
end
end
# Finds the first inline fragment anywhere in the operations.
defp first_inline_frag(blueprint) do
Blueprint.find(blueprint.operations, fn
%Blueprint.Document.Fragment.Inline{} ->
true
_ ->
false
end)
end
# Finds a named fragment definition by name.
defp frag(blueprint, name) do
Blueprint.find(blueprint.fragments, fn
%Blueprint.Document.Fragment.Named{name: ^name} ->
true
_ ->
false
end)
end
# Finds an operation by name (nil matches the anonymous operation).
defp op(blueprint, name) do
Blueprint.find(blueprint.operations, fn
%Blueprint.Document.Operation{name: ^name} ->
true
_ ->
false
end)
end
# Finds a direct child field of `scope` by name.
defp field(scope, name) do
Blueprint.find(scope.selections, fn
%Blueprint.Document.Field{name: ^name} ->
true
_ ->
false
end)
end
# Finds any node of struct `mod` with the given name within `scope`.
defp named(scope, mod, name) do
Blueprint.find(scope, fn
%{__struct__: ^mod, name: ^name} ->
true
_ ->
false
end)
end
# Parses the query (pre-pipeline) and runs the Schema phase on the result.
defp input(query) do
blueprint(query)
|> Phase.Schema.run(schema: Schema)
end
defp blueprint(query) do
{:ok, blueprint, _} = Pipeline.run(query, @pre_pipeline)
blueprint
end
end
# Tests that a document can be compiled once (pipeline halted before the
# Variables phase) and later executed with context and root_value supplied,
# and that both are visible to resolvers.
defmodule Absinthe.Phase.Document.ContextTest do
use Absinthe.Case, async: true
alias Absinthe.Pipeline
@context %{user: "Foo"}
@root %{version: "0.0.1"}
# Compilation half of the pipeline; schema is not needed yet (nil).
@compilation_pipeline Absinthe.Pipeline.for_document(nil, jump_phases: false)
|> Absinthe.Pipeline.before(Absinthe.Phase.Document.Variables)
defmodule TestSchema do
use Absinthe.Schema
query do
# Reads from the execution context.
field :user, :string do
resolve(fn _root_value,
_args,
%{
context: %{
user: user
}
} ->
{:ok, user}
end)
end
# Reads from the root value.
field :version, :string do
resolve(fn root_value, _args, _res ->
{:ok, root_value.version}
end)
end
end
end
describe "when context contains some value" do
test "it is available during execution" do
result =
"""
query GetUser {
  user
}
"""
|> compile()
|> execute()
assert result == %{data: %{"user" => "Foo"}}
end
end
describe "when root-value is set" do
test "it is available during execution" do
result =
"""
query GetVersion {
  version
}
"""
|> compile()
|> execute()
assert result == %{data: %{"version" => @root.version}}
end
end
# Runs only the compilation half and returns the blueprint.
defp compile(query) do
{:ok, blueprint, _} = Pipeline.run(query, @compilation_pipeline)
blueprint
end
# Builds the full pipeline with context/root_value, then resumes it from the
# phase right after where compilation stopped.
defp execute(blueprint) do
pipeline =
Absinthe.Pipeline.for_document(
TestSchema,
context: @context,
root_value: @root
)
start_phase =
case List.last(@compilation_pipeline) do
{mod, _} -> mod
mod -> mod
end
execution_pipeline = Absinthe.Pipeline.from(pipeline, start_phase)
{:ok, doc, _} = Pipeline.run(blueprint, execution_pipeline)
doc.result
end
end
# Exercises the Directives phase: the built-in @include directive must add
# :include / :skip flags to the flagged nodes based on variable values.
defmodule Absinthe.Phase.Document.DirectivesTest do
use Absinthe.Case, async: true
alias Absinthe.{Blueprint, Phase, Pipeline}
defmodule Schema do
use Absinthe.Schema
query do
field :books, list_of(:book)
end
object :book do
field :name, :string
field :categories, list_of(:category)
end
object :category do
field :name, :string
end
end
@query """
query Q($cats: Boolean!) {
books {
name
categories @include(if: $cats) {
... CategoryName
}
}
}
fragment CategoryName on Category {
name
}
"""
describe ".run with built-in @include" do
test "returns a blueprint" do
{:ok, result} = input(@query, %{"cats" => true})
assert %Blueprint{} = result
end
test "adds a :skip flag" do
{:ok, result} = input(@query, %{"cats" => false})
node = named(result, Blueprint.Document.Field, "categories")
assert Blueprint.flagged?(node, :skip)
end
test "adds an :include flag" do
{:ok, result} = input(@query, %{"cats" => true})
node = named(result, Blueprint.Document.Field, "categories")
assert Blueprint.flagged?(node, :include)
end
end
# Runs the Directives phase against a blueprint prepared with `values`.
def input(query, values) do
blueprint(query, values)
|> Phase.Document.Directives.run()
end
defp blueprint(query, values) do
{:ok, blueprint, _} = Pipeline.run(query, pre_pipeline(values))
blueprint
end
# Get the document pipeline up to (but not including) this phase
defp pre_pipeline(values) do
Pipeline.for_document(Schema, variables: values, jump_phases: false)
|> Pipeline.before(Phase.Document.Directives)
end
# Finds a node of struct `mod` named `name` anywhere under `scope`.
defp named(scope, mod, name) do
Blueprint.find(scope, fn
%{__struct__: ^mod, name: ^name} ->
true
_ ->
false
end)
end
end
# Tests for complexity analysis (Phase.Document.Complexity.Result).
# Expected complexity numbers below are derived from the schema's
# `complexity` callbacks plus the default of 1 per field.
defmodule Absinthe.Phase.Document.ComplexityTest do
use Absinthe.PhaseCase,
phase: Absinthe.Phase.Document.Complexity.Result,
schema: __MODULE__.Schema,
async: true
defmodule Schema do
use Absinthe.Schema
query do
field :union_complexity, list_of(:search_result) do
resolve fn _, _ -> {:ok, :foo} end
end
field :foo_complexity, list_of(:foo) do
arg :limit, non_null(:integer)
# Complexity scales with the `limit` argument.
complexity fn %{limit: limit}, child_complexity ->
5 + limit * child_complexity
end
end
field :context_aware_complexity, list_of(:foo) do
complexity penalize_guests(10)
end
field :discount_child_complexity, list_of(:foo) do
complexity fn _, child_complexity -> child_complexity - 1 end
end
field :nested_complexity, list_of(:quux) do
complexity fn _, child_complexity ->
5 * child_complexity
end
end
end
union :search_result do
types [:foo, :quux]
resolve_type fn
:foo, _ -> :foo
:quux, _ -> :quux
end
end
object :foo do
field :bar, :string
field :buzz, :integer
field :heavy, :string do
complexity 100
end
end
object :quux do
field :nested, :foo do
complexity 1
end
end
# 3-arity complexity function: charges an extra flat `penalty` unless a
# :current_user is present in the resolution context.
defp penalize_guests(penalty) do
fn
_, child_complexity, %{context: %{current_user: _}} ->
child_complexity + 1
_, child_complexity, _ ->
child_complexity + 1 + penalty
end
end
end
describe "analysing complexity a document" do
test "use union" do
doc = """
query UnionComplexity {
unionComplexity {
... on Foo {
bar
}
}
}
"""
assert {:ok, result, _} = run_phase(doc, operation_name: "UnionComplexity", variables: %{})
op = result.operations |> Enum.find(&(&1.name == "UnionComplexity"))
assert op.complexity == 2
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == []
end
test "uses arguments and defaults to complexity of 1 for a field" do
doc = """
query ComplexityArg {
fooComplexity(limit: 3) {
bar
}
}
"""
assert {:ok, result, _} = run_phase(doc, operation_name: "ComplexityArg", variables: %{})
op = result.operations |> Enum.find(&(&1.name == "ComplexityArg"))
assert op.complexity == 8
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == []
end
test "uses variable arguments" do
doc = """
query ComplexityVar($limit: Int!) {
fooComplexity(limit: $limit) {
bar
buzz
}
}
"""
assert {:ok, result, _} =
run_phase(doc, operation_name: "ComplexityVar", variables: %{"limit" => 5})
op = result.operations |> Enum.find(&(&1.name == "ComplexityVar"))
assert op.complexity == 15
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == []
end
test "supports access to context" do
doc = """
query ContextComplexity {
contextAwareComplexity {
bar
buzz
}
}
"""
assert {:ok, result, _} =
run_phase(
doc,
operation_name: "ContextComplexity",
variables: %{},
context: %{current_user: true}
)
op = result.operations |> Enum.find(&(&1.name == "ContextComplexity"))
assert op.complexity == 3
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == []
assert {:ok, result, _} =
run_phase(doc, operation_name: "ContextComplexity", variables: %{})
op = result.operations |> Enum.find(&(&1.name == "ContextComplexity"))
assert op.complexity == 13
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == []
end
test "uses fragments" do
doc = """
query ComplexityFrag {
fooComplexity(limit: 7) {
bar
... FooFields
}
}
fragment FooFields on Foo {
buzz
}
"""
assert {:ok, result, _} = run_phase(doc, operation_name: "ComplexityFrag", variables: %{})
op = result.operations |> Enum.find(&(&1.name == "ComplexityFrag"))
assert op.complexity == 19
end
test "raises error on negative complexity" do
doc = """
query ComplexityNeg {
fooComplexity(limit: -20) {
bar
}
}
"""
assert_raise Absinthe.AnalysisError, fn ->
run_phase(doc, operation_name: "ComplexityNeg", variables: %{})
end
end
test "does not error when complex child is discounted by parent" do
doc = """
query ComplexityDiscount {
discountChildComplexity {
heavy
}
}
"""
assert {:ok, result, _} =
run_phase(
doc,
operation_name: "ComplexityDiscount",
variables: %{},
max_complexity: 100
)
op = result.operations |> Enum.find(&(&1.name == "ComplexityDiscount"))
assert op.complexity == 99
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == []
end
test "errors when too complex" do
doc = """
query ComplexityError {
fooComplexity(limit: 1) {
bar
}
}
"""
assert {:error, result, _} =
run_phase(
doc,
operation_name: "ComplexityError",
variables: %{},
max_complexity: 5
)
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == [
"Field fooComplexity is too complex: complexity is 6 and maximum is 5",
"Operation ComplexityError is too complex: complexity is 6 and maximum is 5"
]
end
test "errors when too complex but not for discounted complex child" do
doc = """
query ComplexityNested {
nestedComplexity {
nested {
bar
heavy
}
}
}
"""
assert {:error, result, _} =
run_phase(
doc,
operation_name: "ComplexityNested",
variables: %{},
max_complexity: 4
)
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == [
"Field nestedComplexity is too complex: complexity is 5 and maximum is 4",
"Operation ComplexityNested is too complex: complexity is 5 and maximum is 4"
]
end
test "errors when too complex and nil operation name" do
doc = """
{
fooComplexity(limit: 1) {
heavy
}
}
"""
assert {:error, result, _} =
run_phase(doc, operation_name: nil, variables: %{}, max_complexity: 100)
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == [
"Field fooComplexity is too complex: complexity is 105 and maximum is 100",
"Operation is too complex: complexity is 105 and maximum is 100"
]
end
test "skips analysis when disabled" do
doc = """
query ComplexitySkip {
fooComplexity(limit: 3) {
bar
}
}
"""
assert {:ok, result, _} =
run_phase(
doc,
operation_name: "ComplexitySkip",
variables: %{},
max_complexity: 1,
analyze_complexity: false
)
op = result.operations |> Enum.find(&(&1.name == "ComplexitySkip"))
assert op.complexity == nil
errors = result.execution.validation_errors |> Enum.map(& &1.message)
assert errors == []
end
test "handles GraphQL introspection" do
doc = """
query IntrospectionQuery {
__schema {
types {
...FullType
}
}
}
fragment FullType on __Type {
fields {
args {
...InputValue
}
}
}
fragment InputValue on __InputValue {
type { name }
}
"""
assert {:ok, _, _} =
run_phase(
doc,
operation_name: "IntrospectionQuery",
variables: %{},
analyze_complexity: true
)
end
end
end
# Tests for the Variables phase: default values, explicit nulls, provided
# values, and rejection of non-input types used as variable types.
# NOTE(review): SourceLocation line/column assertions depend on the exact
# whitespace of @query — do not reformat the heredoc.
defmodule Absinthe.Phase.Document.VariablesTest do
use Absinthe.Case, async: true
alias Absinthe.{Blueprint, Phase, Pipeline}
@pre_pipeline [Phase.Parse, Phase.Blueprint]
@query """
query Foo($id: ID!) {
foo(id: $id) {
bar
}
}
query Profile($age: Int = 36, $name: String!) {
profile(name: $name, age: $age) {
id
}
}
"""
describe "when not providing a value for an optional variable with a default value" do
test "uses the default value" do
result = input(@query, %{"name" => "Bruce"})
op = result.operations |> Enum.find(&(&1.name == "Profile"))
assert op.provided_values == %{
"age" => %Blueprint.Input.Integer{
value: 36,
source_location: %Blueprint.SourceLocation{column: 29, line: 6}
},
"name" => %Blueprint.Input.String{value: "Bruce"}
}
end
end
describe "when providing an explicit null value for an optional variable with a default value" do
test "uses null" do
result = input(@query, %{"name" => "Bruce", "age" => nil})
op = result.operations |> Enum.find(&(&1.name == "Profile"))
assert op.provided_values == %{
"age" => %Blueprint.Input.Null{},
"name" => %Blueprint.Input.String{value: "Bruce"}
}
end
end
describe "when providing a value for an optional variable with a default value" do
test "uses the default value" do
result = input(@query, %{"age" => 4, "name" => "Bruce"})
op = result.operations |> Enum.find(&(&1.name == "Profile"))
assert op.provided_values == %{
"age" => %Blueprint.Input.Integer{value: 4},
"name" => %Blueprint.Input.String{value: "Bruce"}
}
end
end
test "should prevent using non input types as variables" do
doc = """
query Foo($input: Thing) {
version
}
"""
expected = %{
errors: [
%{
locations: [%{column: 11, line: 1}],
message: "Variable \"input\" cannot be non-input type \"Thing\"."
},
%{
locations: [%{column: 11, line: 1}, %{column: 1, line: 1}],
message: "Variable \"input\" is never used in operation \"Foo\"."
}
]
}
assert {:ok, expected} == Absinthe.run(doc, Absinthe.Fixtures.ThingsSchema)
end
# Runs the Variables phase against a parsed blueprint with the given
# external variable `values`.
def input(query, values) do
{:ok, result} =
blueprint(query)
|> Phase.Document.Variables.run(variables: values)
result
end
# Parses `query` via the minimal pre-phase pipeline.
defp blueprint(query) do
{:ok, blueprint, _} = Pipeline.run(query, @pre_pipeline)
blueprint
end
end
# Tests for the CoerceEnums phase: enum-typed argument values (literal or
# variable-supplied strings) must be normalized to Blueprint.Input.Enum.
defmodule Absinthe.Phase.Document.Arguments.CoerceEnumsTest do
use Absinthe.PhaseCase,
phase: Absinthe.Phase.Document.Arguments.CoerceEnums,
schema: __MODULE__.Schema,
async: true
alias Absinthe.Blueprint
defmodule Schema do
use Absinthe.Schema
query do
field :foo_enum, :foo do
arg :input, :type
end
field :foo_non_null_enum, :foo do
arg :input, non_null(:type)
end
end
object :foo do
field :bar, :string
end
enum :type do
value :baz
end
end
describe "when using an Enum type input argument" do
test "coerces the type from String to Enum" do
doc = """
query Enum {
fooEnum(input: BAZ) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "Enum", variables: %{})
op = result.operations |> Enum.find(&(&1.name == "Enum"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.Enum{value: "BAZ"} = input_argument.input_value.normalized
end
test "coerces the type from String to Enum when supplying variables" do
doc = """
query EnumVar($input: Type!) {
fooEnum(input: $input) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "EnumVar", variables: %{"input" => "BAZ"})
op = result.operations |> Enum.find(&(&1.name == "EnumVar"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.Enum{value: "BAZ"} = input_argument.input_value.normalized
end
end
describe "when using a non-null Enum type input argument" do
test "coerces the type from String to Enum" do
doc = """
query Enum {
fooNonNullEnum(input: BAZ) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "Enum", variables: %{})
op = result.operations |> Enum.find(&(&1.name == "Enum"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.Enum{value: "BAZ"} = input_argument.input_value.normalized
end
test "coerces the type from String to Enum when supplying variables" do
doc = """
query EnumVar($input: Type!) {
fooNonNullEnum(input: $input) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "EnumVar", variables: %{"input" => "BAZ"})
op = result.operations |> Enum.find(&(&1.name == "EnumVar"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.Enum{value: "BAZ"} = input_argument.input_value.normalized
end
end
end
# Tests for the Normalize phase: literal and variable-supplied argument
# values are normalized into Blueprint.Input.* structs, including defaults
# for unprovided variables and arguments inside fragment definitions.
defmodule Absinthe.Phase.Document.Arguments.NormalizeTest do
  use Absinthe.PhaseCase,
    phase: Absinthe.Phase.Document.Arguments.Normalize,
    schema: __MODULE__.Schema,
    async: true

  alias Absinthe.Blueprint

  defmodule Schema do
    use Absinthe.Schema

    query do
      field :foo, :foo do
        arg :id, non_null(:id)
      end

      field :profile, :user do
        arg :name, :string
        arg :age, :integer
      end

      field :things, :things
    end

    object :foo do
      field :bar, :string
    end

    object :user do
      field :id, non_null(:id)
      field :name, non_null(:string)
      field :age, :integer
    end

    object :things do
      field :items, list_of(:item) do
        arg :id, :id
      end
    end

    object :item do
      field :id, :id
    end
  end

  # NOTE: SourceLocation assertions below depend on this exact text.
  @query """
query Foo($id: ID!) {
foo(id: $id) {
bar
}
}
query Profile($age: Int = 36, $name: String!) {
profile(name: $name, age: $age) {
id
}
}
"""

  # Fixed: the spread must reference the fragment actually defined below
  # ("thingsFragment"); it previously read "thingsFooFragment", leaving the
  # document internally inconsistent (a spread of an undefined fragment).
  @fragment_query """
query Things($id: ID!) {
things {
... thingsFragment
}
}
fragment thingsFragment on Things {
items(id: $id) {
id
}
}
"""

  describe "when not providing a value for an optional variable with a default value" do
    test "uses the default value" do
      {:ok, result, _} = run_phase(@query, variables: %{}, operation_name: "Profile")
      op = result.operations |> Enum.find(&(&1.name == "Profile"))
      field = op.selections |> List.first()
      # The omitted $age variable falls back to its declared default of 36.
      age_argument = field.arguments |> Enum.find(&(&1.name == "age"))

      assert %Blueprint.Input.Integer{
               value: 36,
               source_location: %Blueprint.SourceLocation{column: 29, line: 6}
             } == age_argument.input_value.normalized

      name_argument = field.arguments |> Enum.find(&(&1.name == "name"))

      assert %Blueprint.Input.String{
               value: "Bruce",
               source_location: %Blueprint.SourceLocation{column: 19, line: 7}
             } == name_argument.input_value.normalized
    end
  end

  describe "when providing a value for an optional variable with a default value" do
    test "uses the default value" do
      {:ok, result, _} = run_phase(@query, variables: %{"age" => 4}, operation_name: "Profile")
      op = result.operations |> Enum.find(&(&1.name == "Profile"))
      field = op.selections |> List.first()
      # A provided variable value overrides the declared default.
      age_argument = field.arguments |> Enum.find(&(&1.name == "age"))
      assert %Blueprint.Input.Integer{value: 4} == age_argument.input_value.normalized
      name_argument = field.arguments |> Enum.find(&(&1.name == "name"))

      assert %Blueprint.Input.String{
               value: "Bruce",
               source_location: %Blueprint.SourceLocation{column: 19, line: 7}
             } == name_argument.input_value.normalized
    end
  end

  describe "when providing an input to a fragment" do
    test "normalizes the input" do
      {:ok, result, _} = run_phase(@fragment_query, variables: %{"id" => "foo"})
      # Arguments inside fragment definitions are normalized as well.
      frag = result.fragments |> Enum.find(&(&1.name == "thingsFragment"))
      field = frag.selections |> List.first()
      id_argument = field.arguments |> Enum.find(&(&1.name == "id"))
      assert %Blueprint.Input.String{value: "foo"} == id_argument.input_value.normalized
    end
  end
end
# Tests for the CoerceLists phase: single values supplied to list-typed
# arguments (literal or via variables) must be wrapped into a one-element
# Blueprint.Input.List, including through NonNull wrappers.
defmodule Absinthe.Phase.Document.Arguments.CoerceListsTest do
use Absinthe.PhaseCase,
phase: Absinthe.Phase.Document.Arguments.CoerceLists,
schema: __MODULE__.Schema,
async: true
alias Absinthe.Blueprint
defmodule Schema do
use Absinthe.Schema
query do
field :foo_int_list, :foo do
arg :input, list_of(:integer)
end
field :foo_wrapped_int_list, :foo do
arg :input, non_null(list_of(non_null(:integer)))
end
field :foo_wrapped_enum_list, :foo do
arg :input, non_null(list_of(non_null(:type)))
end
end
object :foo do
field :bar, :string
end
enum :type do
value :baz
end
end
describe "when using an List type input argument" do
test "coerces the type from a single element to List" do
doc = """
query List {
fooIntList(input: 42) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "List", variables: %{})
op = result.operations |> Enum.find(&(&1.name == "List"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.List{
items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Integer{value: 42}}]
} = input_argument.input_value.normalized
end
test "coerces the type from a single element to List when supplying variables" do
doc = """
query ListVar($input: Int) {
fooIntList(input: $input) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "ListVar", variables: %{"input" => 42})
op = result.operations |> Enum.find(&(&1.name == "ListVar"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.List{
items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Integer{value: 42}}]
} = input_argument.input_value.normalized
end
end
describe "when using a wrapped List type input argument" do
test "coerces the type from a single element to List" do
doc = """
query List {
fooWrappedIntList(input: 42) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "List", variables: %{})
op = result.operations |> Enum.find(&(&1.name == "List"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.List{
items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Integer{value: 42}}]
} = input_argument.input_value.normalized
end
test "coerces the type from a single element to List when supplying variables" do
doc = """
query ListVar($input: Int!) {
fooWrappedIntList(input: $input) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "ListVar", variables: %{"input" => 42})
op = result.operations |> Enum.find(&(&1.name == "ListVar"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.List{
items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Integer{value: 42}}]
} = input_argument.input_value.normalized
end
end
describe "when using a List of a coercable type input argument" do
test "coerces the type from a single element to List" do
doc = """
query List {
fooWrappedEnumList(input: BAZ) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "List", variables: %{})
op = result.operations |> Enum.find(&(&1.name == "List"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.List{
items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Enum{value: "BAZ"}}]
} = input_argument.input_value.normalized
end
test "coerces the type from a single element to List when supplying variables" do
doc = """
query ListVar($input: Type!) {
fooWrappedEnumList(input: $input) {
bar
}
}
"""
{:ok, result, _} = run_phase(doc, operation_name: "ListVar", variables: %{"input" => "BAZ"})
op = result.operations |> Enum.find(&(&1.name == "ListVar"))
field = op.selections |> List.first()
input_argument = field.arguments |> Enum.find(&(&1.name == "input"))
assert %Blueprint.Input.List{
items: [%Blueprint.Input.Value{normalized: %Blueprint.Input.Enum{value: "BAZ"}}]
} = input_argument.input_value.normalized
end
end
end
# Validation tests: each variable name may be declared at most once per
# operation (declarations in different operations are independent).
defmodule Absinthe.Phase.Document.Validation.UniqueVariableNamesTest do
  @phase Absinthe.Phase.Document.Validation.UniqueVariableNames

  use Absinthe.ValidationPhaseCase,
    phase: @phase,
    async: true

  alias Absinthe.Blueprint

  # Expected error for a variable name declared more than once in a single
  # operation, reported at `line`.
  defp duplicate_variable(name, line) do
    bad_value(
      Blueprint.Document.VariableDefinition,
      @phase.error_message(name),
      line,
      name: name
    )
  end

  describe "Validate: Unique variable names" do
    test "unique variable names" do
      doc = """
query A($x: Int, $y: String) { __typename }
query B($x: String, $y: Int) { __typename }
"""

      assert_passes_validation(doc, [])
    end

    test "duplicate variable names" do
      doc = """
query A($x: Int, $x: Int, $x: String) { __typename }
query B($x: String, $x: Int) { __typename }
query C($x: Int, $x: Int) { __typename }
"""

      # One error per operation, each reported on that operation's line.
      expected = for line <- 1..3, do: duplicate_variable("x", line)

      assert_fails_validation(doc, [], expected)
    end
  end
end
# Validation tests: every argument supplied to a field or directive must be
# defined on that field/directive in the schema (pet-shop fixture schema).
defmodule Absinthe.Phase.Document.Validation.KnownArgumentNamesTest do
@phase Absinthe.Phase.Document.Validation.KnownArgumentNames
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.{Blueprint}
describe "Valid" do
test "single arg is known" do
assert_passes_validation(
"""
fragment argOnRequiredArg on Dog {
doesKnowCommand(dogCommand: SIT)
}
""",
[]
)
end
test "multiple args are known" do
assert_passes_validation(
"""
fragment multipleArgs on ComplicatedArgs {
multipleReqs(req1: 1, req2: 2)
}
""",
[]
)
end
test "multiple args in reverse order are known" do
assert_passes_validation(
"""
fragment multipleArgsReverseOrder on ComplicatedArgs {
multipleReqs(req2: 2, req1: 1)
}
""",
[]
)
end
test "no args on optional arg" do
assert_passes_validation(
"""
fragment noArgOnOptionalArg on Dog {
isHousetrained
}
""",
[]
)
end
test "args are known deeply" do
assert_passes_validation(
"""
{
dog {
doesKnowCommand(dogCommand: SIT)
}
human {
pet {
... on Dog {
doesKnowCommand(dogCommand: SIT)
}
}
}
}
""",
[]
)
end
test "directive args are known" do
assert_passes_validation(
"""
{
dog @skip(if: true)
}
""",
[]
)
end
end
describe "Invalid" do
test "undirective args are invalid" do
assert_fails_validation(
"""
{
dog @skip(unless: true)
}
""",
[],
[
bad_value(
Blueprint.Input.Argument,
@phase.directive_error_message("unless", "skip"),
2,
name: "unless"
)
]
)
end
test "invalid arg name" do
assert_fails_validation(
"""
fragment invalidArgName on Dog {
doesKnowCommand(unknown: true)
}
""",
[],
[
bad_value(
Blueprint.Input.Argument,
@phase.field_error_message("unknown", "doesKnowCommand", "Dog"),
2,
name: "unknown"
)
]
)
end
test "unknown args amongst known args" do
assert_fails_validation(
"""
fragment oneGoodArgOneInvalidArg on Dog {
doesKnowCommand(whoknows: 1, dogCommand: SIT, unknown: true)
}
""",
[],
[
bad_value(
Blueprint.Input.Argument,
@phase.field_error_message("unknown", "doesKnowCommand", "Dog"),
2,
name: "unknown"
),
bad_value(
Blueprint.Input.Argument,
@phase.field_error_message("whoknows", "doesKnowCommand", "Dog"),
2,
name: "whoknows"
)
]
)
end
test "unknown args deeply" do
assert_fails_validation(
"""
{
dog {
doesKnowCommand(unknown: true)
}
human {
pet {
... on Dog {
doesKnowCommand(unknown: true)
}
}
}
}
""",
[],
[
bad_value(
Blueprint.Input.Argument,
@phase.field_error_message("unknown", "doesKnowCommand", "Dog"),
3,
name: "unknown"
),
bad_value(
Blueprint.Input.Argument,
@phase.field_error_message("unknown", "doesKnowCommand", "Dog"),
8,
name: "unknown"
)
]
)
end
end
end
# Validation tests: exactly one operation must be selectable for execution —
# either via a matching operation name or because only one operation exists.
defmodule Absinthe.Phase.Document.Validation.SelectedCurrentOperationTest do
  @phase Absinthe.Phase.Document.Validation.SelectedCurrentOperation

  use Absinthe.ValidationPhaseCase,
    phase: @phase,
    async: true

  alias Absinthe.Blueprint

  # A two-operation document, used by several tests below.
  @two_operations """
query Bar {
name
}
query Foo {
name
}
"""

  # Expected error when no single current operation can be determined.
  defp no_current_operation do
    bad_value(Blueprint, @phase.error_message, nil)
  end

  describe "Given an operation name" do
    test "passes when the operation is provided" do
      assert_passes_validation(@two_operations, operation_name: "Foo")
    end

    test "fails when the operation is not provided" do
      assert_fails_validation(
        @two_operations,
        [operation_name: "Nothere"],
        no_current_operation()
      )
    end
  end

  describe "Not given an operation name" do
    test "passes when only one operation is given and is named" do
      doc = """
query Bar {
name
}
"""

      assert_passes_validation(doc, [])
    end

    test "passes when only one operation is given anonymously" do
      doc = """
{
name
}
"""

      assert_passes_validation(doc, [])
    end

    test "fails when more that one operation is given" do
      assert_fails_validation(@two_operations, [], no_current_operation())
    end
  end
end
# Validation tests: every declared variable must be used somewhere in its
# operation, including transitively through (possibly recursive) fragments.
defmodule Absinthe.Phase.Document.Validation.NoUnusedVariablesTest do
@phase Absinthe.Phase.Document.Validation.NoUnusedVariables
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.Blueprint
# Expected error for a variable declared in `operation_name` (at
# `operation_line`) but never referenced.
defp unused_variable(name, operation_name, operation_line) do
bad_value(
Blueprint.Document.VariableDefinition,
@phase.error_message(name, operation_name),
operation_line,
name: name
)
end
describe "Validate: No unused variables" do
test "uses all variables" do
assert_passes_validation(
"""
query ($a: String, $b: String, $c: String) {
field(a: $a, b: $b, c: $c)
}
""",
[]
)
end
test "uses all variables deeply" do
assert_passes_validation(
"""
query Foo($a: String, $b: String, $c: String) {
field(a: $a) {
field(b: $b) {
field(c: $c)
}
}
}
""",
[]
)
end
test "uses all variables deeply in inline fragments" do
assert_passes_validation(
"""
query Foo($a: String, $b: String, $c: String) {
... on Type {
field(a: $a) {
field(b: $b) {
... on Type {
field(c: $c)
}
}
}
}
}
""",
[]
)
end
test "uses all variables in fragments" do
assert_passes_validation(
"""
query Foo($a: String, $b: String, $c: String) {
...FragA
}
fragment FragA on Type {
field(a: $a) {
...FragB
}
}
fragment FragB on Type {
field(b: $b) {
...FragC
}
}
fragment FragC on Type {
field(c: $c)
}
""",
[]
)
end
test "variable used by fragment in multiple operations" do
assert_passes_validation(
"""
query Foo($a: String) {
...FragA
}
query Bar($b: String) {
...FragB
}
fragment FragA on Type {
field(a: $a)
}
fragment FragB on Type {
field(b: $b)
}
""",
[]
)
end
test "variable used by recursive fragment" do
assert_passes_validation(
"""
query Foo($a: String) {
...FragA
}
fragment FragA on Type {
field(a: $a) {
...FragA
}
}
""",
[]
)
end
test "variable not used" do
assert_fails_validation(
"""
query ($a: String, $b: String, $c: String) {
field(a: $a, b: $b)
}
""",
[],
[
unused_variable("c", nil, 1)
]
)
end
test "multiple variables not used" do
assert_fails_validation(
"""
query Foo($a: String, $b: String, $c: String) {
field(b: $b)
}
""",
[],
[
unused_variable("a", "Foo", 1),
unused_variable("c", "Foo", 1)
]
)
end
test "variable not used in fragments" do
assert_fails_validation(
"""
query Foo($a: String, $b: String, $c: String) {
...FragA
}
fragment FragA on Type {
field(a: $a) {
...FragB
}
}
fragment FragB on Type {
field(b: $b) {
...FragC
}
}
fragment FragC on Type {
field
}
""",
[],
[
unused_variable("c", "Foo", 1)
]
)
end
test "multiple variables not used in fragments" do
assert_fails_validation(
"""
query Foo($a: String, $b: String, $c: String) {
...FragA
}
fragment FragA on Type {
field {
...FragB
}
}
fragment FragB on Type {
field(b: $b) {
...FragC
}
}
fragment FragC on Type {
field
}
""",
[],
[
unused_variable("a", "Foo", 1),
unused_variable("c", "Foo", 1)
]
)
end
test "variable not used by unreferenced fragment" do
assert_fails_validation(
"""
query Foo($b: String) {
...FragA
}
fragment FragA on Type {
field(a: $a)
}
fragment FragB on Type {
field(b: $b)
}
""",
[],
[
unused_variable("b", "Foo", 1)
]
)
end
test "variable not used by fragment used by other operation" do
assert_fails_validation(
"""
query Foo($b: String) {
...FragA
}
query Bar($a: String) {
...FragB
}
fragment FragA on Type {
field(a: $a)
}
fragment FragB on Type {
field(b: $b)
}
""",
[],
[
unused_variable("b", "Foo", 1),
unused_variable("a", "Bar", 4)
]
)
end
end
end
# Validation tests: operation names must be unique across a document, even
# when the duplicates are of different operation types (query/mutation/
# subscription). Fragment names do not collide with operation names.
defmodule Absinthe.Phase.Document.Validation.UniqueOperationNamesTest do
@phase Absinthe.Phase.Document.Validation.UniqueOperationNames
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.Blueprint
# Expected error for an operation name that appears more than once,
# reported at `line`.
defp duplicate_operation(name, line) do
bad_value(
Blueprint.Document.Operation,
@phase.error_message(name),
line,
name: name
)
end
describe "Validate: Unique operation names" do
test "no operations" do
assert_passes_validation(
"""
fragment fragA on Type {
field
}
""",
[]
)
end
test "one anon operation" do
assert_passes_validation(
"""
{
field
}
""",
[]
)
end
test "one named operation" do
assert_passes_validation(
"""
query Foo {
field
}
""",
[]
)
end
test "multiple operations" do
assert_passes_validation(
"""
query Foo {
field
}
query Bar {
field
}
""",
[]
)
end
test "multiple operations of different types" do
assert_passes_validation(
"""
query Foo {
field
}
mutation Bar {
field
}
subscription Baz {
field
}
""",
[]
)
end
test "fragment and operation named the same" do
assert_passes_validation(
"""
query Foo {
...Foo
}
fragment Foo on Type {
field
}
""",
[]
)
end
test "multiple operations of same name" do
assert_fails_validation(
"""
query Foo {
fieldA
}
query Foo {
fieldB
}
""",
[],
[
duplicate_operation("Foo", 1),
duplicate_operation("Foo", 4)
]
)
end
test "multiple ops of same name of different types (mutation)" do
assert_fails_validation(
"""
query Foo {
fieldA
}
mutation Foo {
fieldB
}
""",
[],
[
duplicate_operation("Foo", 1),
duplicate_operation("Foo", 4)
]
)
end
test "multiple ops of same name of different types (subscription)" do
assert_fails_validation(
"""
query Foo {
fieldA
}
subscription Foo {
fieldB
}
""",
[],
[
duplicate_operation("Foo", 1),
duplicate_operation("Foo", 4)
]
)
end
end
end
# Validation tests: leaf (scalar/enum) fields must not carry sub-selections,
# and composite (object/interface/list-of-composite) fields must carry one.
defmodule Absinthe.Phase.Document.Validation.ScalarLeafsTest do
@phase Absinthe.Phase.Document.Validation.ScalarLeafs
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.{Blueprint}
# Expected error: a sub-selection was given on a scalar/enum field.
defp no_scalar_subselection(node_name, type_name, line) do
bad_value(
Blueprint.Document.Field,
@phase.no_subselection_allowed_message(node_name, type_name),
line,
name: node_name
)
end
# Expected error: a composite-typed field was selected without sub-fields.
defp missing_obj_subselection(node_name, type_name, line) do
bad_value(
Blueprint.Document.Field,
@phase.required_subselection_message(node_name, type_name),
line,
name: node_name
)
end
describe "Validate: Scalar leafs" do
test "valid scalar selection" do
assert_passes_validation(
"""
fragment scalarSelection on Dog {
barks
}
""",
[]
)
end
test "object type missing selection" do
assert_fails_validation(
"""
query directQueryOnObjectWithoutSubFields {
human
}
""",
[],
missing_obj_subselection("human", "Human", 2)
)
end
test "interface type missing selection" do
assert_fails_validation(
"""
{
human { pets }
}
""",
[],
missing_obj_subselection("pets", "[Pet]", 2)
)
end
test "valid scalar selection with args" do
assert_passes_validation(
"""
fragment scalarSelectionWithArgs on Dog {
doesKnowCommand(dogCommand: SIT)
}
""",
[]
)
end
test "scalar selection not allowed on Boolean" do
assert_fails_validation(
"""
fragment scalarSelectionsNotAllowedOnBoolean on Dog {
barks { sinceWhen }
}
""",
[],
no_scalar_subselection("barks", "Boolean", 2)
)
end
test "scalar selection not allowed on Enum" do
assert_fails_validation(
"""
fragment scalarSelectionsNotAllowedOnEnum on Cat {
furColor { inHexdec }
}
""",
[],
no_scalar_subselection("furColor", "FurColor", 2)
)
end
test "scalar selection not allowed with args" do
assert_fails_validation(
"""
fragment scalarSelectionsNotAllowedWithArgs on Dog {
doesKnowCommand(dogCommand: SIT) { sinceWhen }
}
""",
[],
no_scalar_subselection("doesKnowCommand", "Boolean", 2)
)
end
test "Scalar selection not allowed with directives" do
assert_fails_validation(
"""
fragment scalarSelectionsNotAllowedWithDirectives on Dog {
name @include(if: true) { isAlsoHumanName }
}
""",
[],
no_scalar_subselection("name", "String", 2)
)
end
test "Scalar selection not allowed with directives and args" do
assert_fails_validation(
"""
fragment scalarSelectionsNotAllowedWithDirectivesAndArgs on Dog {
doesKnowCommand(dogCommand: SIT) @include(if: true) { sinceWhen }
}
""",
[],
no_scalar_subselection("doesKnowCommand", "Boolean", 2)
)
end
end
end
# Tests for the FieldsOnCorrectType validation phase: every queried field must
# exist on the type being selected; errors carry "did you mean" suggestions
# for either fragment types or sibling field names.
defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectTypeTest do
@phase Absinthe.Phase.Document.Validation.FieldsOnCorrectType
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.Blueprint
# Expected-error helper: a Field node flagged with the phase's message for an
# unknown field, including the suggested types / suggested fields lists.
defp undefined_field(name, type_name, type_suggestions, field_suggestions, line) do
bad_value(
Blueprint.Document.Field,
@phase.error_message(name, type_name, type_suggestions, field_suggestions),
line,
name: name
)
end
describe "Validate: Fields on correct type" do
test "Object field selection" do
assert_passes_validation(
"""
fragment objectFieldSelection on Dog {
__typename
name
}
""",
[]
)
end
test "Aliased object field selection" do
assert_passes_validation(
"""
fragment aliasedObjectFieldSelection on Dog {
tn : __typename
otherName : name
}
""",
[]
)
end
test "Interface field selection" do
assert_passes_validation(
"""
fragment interfaceFieldSelection on Pet {
__typename
name
}
""",
[]
)
end
test "Aliased interface field selection" do
assert_passes_validation(
"""
fragment interfaceFieldSelection on Pet {
otherName : name
}
""",
[]
)
end
test "Lying alias selection" do
assert_passes_validation(
"""
fragment lyingAliasSelection on Dog {
name : nickname
}
""",
[]
)
end
# An unknown parent type is another rule's problem; fields on it are ignored.
test "Ignores fields on unknown type" do
assert_passes_validation(
"""
fragment unknownSelection on UnknownType {
unknownField
}
""",
[]
)
end
test "reports errors when type is known again" do
assert_fails_validation(
"""
fragment typeKnownAgain on Pet {
unknown_pet_field {
... on Cat {
unknown_cat_field
}
}
}
""",
[],
[
undefined_field("unknown_pet_field", "Pet", [], [], 2),
undefined_field("unknown_cat_field", "Cat", [], [], 4)
]
)
end
test "Field not defined on fragment" do
assert_fails_validation(
"""
fragment fieldNotDefined on Dog {
meowVolume
}
""",
[],
undefined_field("meowVolume", "Dog", [], ["barkVolume"], 2)
)
end
# Only the outermost unknown field is reported; its children are skipped.
test "Ignores deeply unknown field" do
assert_fails_validation(
"""
fragment deepFieldNotDefined on Dog {
unknown_field {
deeper_unknown_field
}
}
""",
[],
undefined_field("unknown_field", "Dog", [], [], 2)
)
end
test "Sub-field not defined" do
assert_fails_validation(
"""
fragment subFieldNotDefined on Human {
pets {
unknown_field
}
}
""",
[],
undefined_field("unknown_field", "Pet", [], [], 3)
)
end
test "Field not defined on inline fragment" do
assert_fails_validation(
"""
fragment fieldNotDefined on Pet {
... on Dog {
meowVolume
}
}
""",
[],
undefined_field("meowVolume", "Dog", [], ["barkVolume"], 3)
)
end
test "Aliased field target not defined" do
assert_fails_validation(
"""
fragment aliasedFieldTargetNotDefined on Dog {
volume : mooVolume
}
""",
[],
undefined_field("mooVolume", "Dog", [], ["barkVolume"], 2)
)
end
test "Aliased lying field target not defined" do
assert_fails_validation(
"""
fragment aliasedLyingFieldTargetNotDefined on Dog {
barkVolume : kawVolume
}
""",
[],
undefined_field("kawVolume", "Dog", [], ["barkVolume"], 2)
)
end
test "Not defined on interface" do
assert_fails_validation(
"""
fragment notDefinedOnInterface on Pet {
tailLength
}
""",
[],
undefined_field("tailLength", "Pet", [], [], 2)
)
end
test "Defined on implementors but not on interface" do
assert_fails_validation(
"""
fragment definedOnImplementorsButNotInterface on Pet {
nickname
}
""",
[],
undefined_field("nickname", "Pet", ["Cat", "Dog"], ["name"], 2)
)
end
# __typename is the only field directly selectable on a union.
test "Meta field selection on union" do
assert_passes_validation(
"""
fragment directFieldSelectionOnUnion on CatOrDog {
__typename
}
""",
[]
)
end
test "Direct field selection on union" do
assert_fails_validation(
"""
fragment directFieldSelectionOnUnion on CatOrDog {
directField
}
""",
[],
undefined_field("directField", "CatOrDog", [], [], 2)
)
end
test "Defined on implementors queried on union" do
assert_fails_validation(
"""
fragment definedOnImplementorsQueriedOnUnion on CatOrDog {
name
}
""",
[],
undefined_field("name", "CatOrDog", ["Being", "Canine", "Cat", "Dog", "Pet"], [], 2)
)
end
test "valid field in inline fragment" do
assert_passes_validation(
"""
fragment objectFieldSelection on Pet {
... on Dog {
name
}
... {
name
}
}
""",
[]
)
end
# The remaining tests exercise error_message/4 formatting directly.
test "fields on correct type error message: Works with no suggestions" do
assert ~s(Cannot query field "f" on type "T".) == @phase.error_message("f", "T", [], [])
end
test "fields on correct type error message: Works with no small numbers of type suggestions" do
assert ~s(Cannot query field "f" on type "T". Did you mean to use an inline fragment on "A" or "B"?) ==
@phase.error_message("f", "T", ["A", "B"], [])
end
test "fields on correct type error message: Works with no small numbers of field suggestions" do
assert ~s(Cannot query field "f" on type "T". Did you mean "z" or "y"?) ==
@phase.error_message("f", "T", [], ["z", "y"])
end
test "fields on correct type error message: Only shows one set of suggestions at a time, preferring types" do
assert ~s(Cannot query field "f" on type "T". Did you mean to use an inline fragment on "A" or "B"?) ==
@phase.error_message("f", "T", ["A", "B"], ["z", "y"])
end
# Suggestion lists are truncated to five entries.
test "fields on correct type error message: Limits lots of type suggestions" do
assert ~s(Cannot query field "f" on type "T". Did you mean to use an inline fragment on "A", "B", "C", "D", or "E"?) ==
@phase.error_message("f", "T", ["A", "B", "C", "D", "E", "F"], [])
end
test "fields on correct type error message: Limits lots of field suggestions" do
assert ~s(Cannot query field "f" on type "T". Did you mean "z", "y", "x", "w", or "v"?) ==
@phase.error_message("f", "T", [], ["z", "y", "x", "w", "v", "u"])
end
end
end
# Tests for the KnownFragmentNames validation phase: every `...Name` spread
# must refer to a fragment defined somewhere in the document.
defmodule Absinthe.Phase.Document.Validation.KnownFragmentNamesTest do
@phase Absinthe.Phase.Document.Validation.KnownFragmentNames
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.Blueprint
# Expected-error helper: a fragment Spread node flagged as unknown at `line`.
defp undefined_fragment(name, line) do
bad_value(
Blueprint.Document.Fragment.Spread,
~s(Unknown fragment "#{name}"),
line,
name: name
)
end
describe "Validate: Known fragment names" do
test "known fragment names are valid" do
assert_passes_validation(
"""
{
human(id: 4) {
... HumanFields1
... on Human {
... HumanFields2
}
... {
name
}
}
}
fragment HumanFields1 on Human {
name
... HumanFields3
}
fragment HumanFields2 on Human {
name
}
fragment HumanFields3 on Human {
name
}
""",
[]
)
end
# Unknown spreads are reported whether they appear in operations, inline
# fragments, or inside other fragment definitions.
test "unknown fragment names are invalid" do
assert_fails_validation(
"""
{
human(id: 4) {
...UnknownFragment1
... on Human {
...UnknownFragment2
}
}
}
fragment HumanFields on Human {
name
...UnknownFragment3
}
""",
[],
[
undefined_fragment("UnknownFragment1", 3),
undefined_fragment("UnknownFragment2", 5),
undefined_fragment("UnknownFragment3", 11)
]
)
end
end
end
# Tests for the ArgumentsOfCorrectType validation phase: literal argument
# values (scalars, enums, lists, input objects, directive arguments) must be
# coercible to the argument's declared type. Failure expectations are built
# with bad_argument/5; `verbose_errors` carries per-item / per-field detail
# lines that the phase appends below the main message.
defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectTypeTest do
@phase Absinthe.Phase.Document.Validation.ArgumentsOfCorrectType
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.{Blueprint}
# Expected-error helper for an Argument node. `_expected_type` is kept for
# readability at call sites but does not participate in the message.
defp bad_argument(name, _expected_type, inspected_value, line, verbose_errors) do
bad_value(
Blueprint.Input.Argument,
error_message(name, inspected_value, verbose_errors),
line,
name: name
)
end
# Base message only, when there are no detail lines.
defp error_message(name, inspected_value, []) do
@phase.error_message(name, inspected_value)
end
# Base message plus newline-joined detail lines.
defp error_message(name, inspected_value, verbose_errors) do
@phase.error_message(name, inspected_value) <> "\n" <> Enum.join(verbose_errors, "\n")
end
describe "Valid values" do
test "Good int value" do
assert_passes_validation(
"""
{
complicatedArgs {
intArgField(intArg: 2)
}
}
""",
[]
)
end
test "Good boolean value" do
assert_passes_validation(
"""
{
complicatedArgs {
booleanArgField(booleanArg: true)
}
}
""",
[]
)
end
test "Good string value" do
assert_passes_validation(
"""
{
complicatedArgs {
stringArgField(stringArg: "foo")
}
}
""",
[]
)
end
test "Good float value" do
assert_passes_validation(
"""
{
complicatedArgs {
floatArgField(floatArg: 1.1)
}
}
""",
[]
)
end
test "Int into Float" do
assert_passes_validation(
"""
{
complicatedArgs {
floatArgField(floatArg: 1)
}
}
""",
[]
)
end
test "Int into ID" do
assert_passes_validation(
"""
{
complicatedArgs {
idArgField(idArg: 1)
}
}
""",
[]
)
end
test "String into ID" do
assert_passes_validation(
"""
{
complicatedArgs {
idArgField(idArg: "someIdString")
}
}
""",
[]
)
end
test "Good enum value" do
assert_passes_validation(
"""
{
dog {
doesKnowCommand(dogCommand: SIT)
}
}
""",
[]
)
end
end
describe "Invalid String values" do
test "Int into String" do
assert_fails_validation(
"""
{
complicatedArgs {
stringArgField(stringArg: 1)
}
}
""",
[],
bad_argument("stringArg", "String", "1", 3, [])
)
end
test "Float into String" do
assert_fails_validation(
"""
{
complicatedArgs {
stringArgField(stringArg: 1.0)
}
}
""",
[],
bad_argument("stringArg", "String", "1.0", 3, [])
)
end
test "Boolean into String" do
assert_fails_validation(
"""
{
complicatedArgs {
stringArgField(stringArg: true)
}
}
""",
[],
bad_argument("stringArg", "String", "true", 3, [])
)
end
test "Unquoted String into String" do
assert_fails_validation(
"""
{
complicatedArgs {
stringArgField(stringArg: BAR)
}
}
""",
[],
bad_argument("stringArg", "String", "BAR", 3, [])
)
end
end
describe "Invalid Int values" do
test "String into Int" do
assert_fails_validation(
"""
{
complicatedArgs {
intArgField(intArg: "3")
}
}
""",
[],
bad_argument("intArg", "Int", ~s("3"), 3, [])
)
end
# GraphQL Int is 32-bit; out-of-range literals must be rejected.
test "Big Int into Int" do
assert_fails_validation(
"""
{
complicatedArgs {
intArgField(intArg: 829384293849283498239482938)
}
}
""",
[],
bad_argument("intArg", "Int", "829384293849283498239482938", 3, [])
)
end
test "Unquoted String into Int" do
assert_fails_validation(
"""
{
complicatedArgs {
intArgField(intArg: FOO)
}
}
""",
[],
bad_argument("intArg", "Int", "FOO", 3, [])
)
end
# Even a whole-number Float literal is not an Int.
test "Simple Float into Int" do
assert_fails_validation(
"""
{
complicatedArgs {
intArgField(intArg: 3.0)
}
}
""",
[],
bad_argument("intArg", "Int", "3.0", 3, [])
)
end
test "Float into Int" do
assert_fails_validation(
"""
{
complicatedArgs {
intArgField(intArg: 3.333)
}
}
""",
[],
bad_argument("intArg", "Int", "3.333", 3, [])
)
end
end
describe "Invalid Float values" do
test "String into Float" do
assert_fails_validation(
"""
{
complicatedArgs {
floatArgField(floatArg: "3.333")
}
}
""",
[],
bad_argument("floatArg", "Float", ~s("3.333"), 3, [])
)
end
test "Boolean into Float" do
assert_fails_validation(
"""
{
complicatedArgs {
floatArgField(floatArg: true)
}
}
""",
[],
bad_argument("floatArg", "Float", "true", 3, [])
)
end
test "Unquoted into Float" do
assert_fails_validation(
"""
{
complicatedArgs {
floatArgField(floatArg: FOO)
}
}
""",
[],
bad_argument("floatArg", "Float", "FOO", 3, [])
)
end
end
describe "Invalid Boolean value" do
test "Int into Boolean" do
assert_fails_validation(
"""
{
complicatedArgs {
booleanArgField(booleanArg: 2)
}
}
""",
[],
bad_argument("booleanArg", "Boolean", "2", 3, [])
)
end
test "Float into Boolean" do
assert_fails_validation(
"""
{
complicatedArgs {
booleanArgField(booleanArg: 1.0)
}
}
""",
[],
bad_argument("booleanArg", "Boolean", "1.0", 3, [])
)
end
test "String into Boolean" do
assert_fails_validation(
"""
{
complicatedArgs {
booleanArgField(booleanArg: "true")
}
}
""",
[],
bad_argument("booleanArg", "Boolean", ~s("true"), 3, [])
)
end
test "Unquoted into Boolean" do
assert_fails_validation(
"""
{
complicatedArgs {
booleanArgField(booleanArg: TRUE)
}
}
""",
[],
bad_argument("booleanArg", "Boolean", "TRUE", 3, [])
)
end
end
describe "Invalid ID value" do
test "Float into ID" do
assert_fails_validation(
"""
{
complicatedArgs {
idArgField(idArg: 1.0)
}
}
""",
[],
bad_argument("idArg", "ID", "1.0", 3, [])
)
end
test "Boolean into ID" do
assert_fails_validation(
"""
{
complicatedArgs {
idArgField(idArg: true)
}
}
""",
[],
bad_argument("idArg", "ID", "true", 3, [])
)
end
test "Unquoted into ID" do
assert_fails_validation(
"""
{
complicatedArgs {
idArgField(idArg: SOMETHING)
}
}
""",
[],
bad_argument("idArg", "ID", "SOMETHING", 3, [])
)
end
end
describe "Invalid Enum value" do
test "Int into Enum" do
assert_fails_validation(
"""
{
dog {
doesKnowCommand(dogCommand: 2)
}
}
""",
[],
bad_argument("dogCommand", "DogCommand", "2", 3, [])
)
end
test "Float into Enum" do
assert_fails_validation(
"""
{
dog {
doesKnowCommand(dogCommand: 1.0)
}
}
""",
[],
bad_argument("dogCommand", "DogCommand", "1.0", 3, [])
)
end
# Enum values are bare names, never quoted strings.
test "String into Enum" do
assert_fails_validation(
"""
{
dog {
doesKnowCommand(dogCommand: "SIT")
}
}
""",
[],
bad_argument("dogCommand", "DogCommand", ~s("SIT"), 3, [])
)
end
test "Boolean into Enum" do
assert_fails_validation(
"""
{
dog {
doesKnowCommand(dogCommand: true)
}
}
""",
[],
bad_argument("dogCommand", "DogCommand", "true", 3, [])
)
end
test "Unknown Enum Value into Enum" do
assert_fails_validation(
"""
{
dog {
doesKnowCommand(dogCommand: JUGGLE)
}
}
""",
[],
bad_argument("dogCommand", "DogCommand", "JUGGLE", 3, [])
)
end
# Enum value matching is case-sensitive.
test "Different case Enum Value into Enum" do
assert_fails_validation(
"""
{
dog {
doesKnowCommand(dogCommand: sit)
}
}
""",
[],
bad_argument("dogCommand", "DogCommand", "sit", 3, [])
)
end
end
describe "Valid List value" do
test "Good list value" do
assert_passes_validation(
"""
{
complicatedArgs {
stringListArgField(stringListArg: ["one", "two"])
}
}
""",
[]
)
end
test "Empty list value" do
assert_passes_validation(
"""
{
complicatedArgs {
stringListArgField(stringListArg: [])
}
}
""",
[]
)
end
# GraphQL coerces a single value to a one-element list.
test "Single value into List" do
assert_passes_validation(
"""
{
complicatedArgs {
stringListArgField(stringListArg: "one")
}
}
""",
[]
)
end
test "List of List" do
assert_passes_validation(
"""
{
complicatedArgs {
stringListOfListArgField(stringListOfListArg: [["one"], ["two", "three"]])
}
}
""",
[]
)
end
end
describe "Invalid List value" do
# Detail lines identify the offending list index (here index 1).
test "Incorrect item type" do
assert_fails_validation(
"""
{
complicatedArgs {
stringListArgField(stringListArg: ["one", 2])
}
}
""",
[],
[
bad_argument("stringListArg", "[String]", ~s(["one", 2]), 3, [
@phase.value_error_message(1, "String", "2")
])
]
)
end
test "Single value of incorrect type" do
assert_fails_validation(
"""
{
complicatedArgs {
stringListArgField(stringListArg: 1)
}
}
""",
[],
[
bad_argument("stringListArg", "[String]", "1", 3, [
@phase.value_error_message(0, "[String]", "1")
])
]
)
end
end
describe "Valid non-nullable value" do
test "Arg on optional arg" do
assert_passes_validation(
"""
{
dog {
isHousetrained(atOtherHomes: true)
}
}
""",
[]
)
end
test "No Arg on optional arg" do
assert_passes_validation(
"""
{
dog {
isHousetrained
}
}
""",
[]
)
end
test "Multiple args" do
assert_passes_validation(
"""
{
complicatedArgs {
multipleReqs(req1: 1, req2: 2)
}
}
""",
[]
)
end
test "Multiple args reverse order" do
assert_passes_validation(
"""
{
complicatedArgs {
multipleReqs(req2: 2, req1: 1)
}
}
""",
[]
)
end
test "No args on multiple optional" do
assert_passes_validation(
"""
{
complicatedArgs {
multipleOpts
}
}
""",
[]
)
end
test "One arg on multiple optional" do
assert_passes_validation(
"""
{
complicatedArgs {
multipleOpts(opt1: 1)
}
}
""",
[]
)
end
test "Second arg on multiple optional" do
assert_passes_validation(
"""
{
complicatedArgs {
multipleOpts(opt2: 1)
}
}
""",
[]
)
end
test "Multiple reqs on mixedList" do
assert_passes_validation(
"""
{
complicatedArgs {
multipleOptAndReq(req1: 3, req2: 4)
}
}
""",
[]
)
end
test "Multiple reqs and one opt on mixedList" do
assert_passes_validation(
"""
{
complicatedArgs {
multipleOptAndReq(req1: 3, req2: 4, opt1: 5)
}
}
""",
[]
)
end
test "All reqs and opts on mixedList" do
assert_passes_validation(
"""
{
complicatedArgs {
multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6)
}
}
""",
[]
)
end
end
describe "Invalid non-nullable value" do
test "Incorrect value type" do
assert_fails_validation(
"""
{
complicatedArgs {
multipleReqs(req2: "two", req1: "one")
}
}
""",
[],
[
bad_argument("req2", "Int!", ~s("two"), 3, []),
bad_argument("req1", "Int!", ~s("one"), 3, [])
]
)
end
# Only the type error is this phase's concern; the missing required
# argument is reported by a different validation.
test "Incorrect value and missing argument" do
assert_fails_validation(
"""
{
complicatedArgs {
multipleReqs(req1: "one")
}
}
""",
[],
bad_argument("req1", "Int!", ~s("one"), 3, [])
)
end
end
describe "Valid input object value" do
test "Optional arg, despite required field in type" do
assert_passes_validation(
"""
{
complicatedArgs {
complexArgField
}
}
""",
[]
)
end
test "Partial object, only required" do
assert_passes_validation(
"""
{
complicatedArgs {
complexArgField(complexArg: { requiredField: true })
}
}
""",
[]
)
end
test "Partial object, required field can be falsey" do
assert_passes_validation(
"""
{
complicatedArgs {
complexArgField(complexArg: { requiredField: false })
}
}
""",
[]
)
end
test "Partial object, including required" do
assert_passes_validation(
"""
{
complicatedArgs {
complexArgField(complexArg: { requiredField: true, intField: 4 })
}
}
""",
[]
)
end
test "Full object" do
assert_passes_validation(
"""
{
complicatedArgs {
complexArgField(complexArg: {
requiredField: true,
intField: 4,
stringField: "foo",
booleanField: false,
stringListField: ["one", "two"]
})
}
}
""",
[]
)
end
test "Full object with fields in different order" do
assert_passes_validation(
"""
{
complicatedArgs {
complexArgField(complexArg: {
stringListField: ["one", "two"],
booleanField: false,
requiredField: true,
stringField: "foo",
intField: 4,
})
}
}
""",
[]
)
end
end
describe "Invalid input object value" do
# Detail line reports the required field that coerced to null.
test "Partial object, missing required" do
assert_fails_validation(
"""
{
complicatedArgs {
complexArgField(complexArg: { intField: 4 })
}
}
""",
[],
[
bad_argument("complexArg", "ComplexInput", "{intField: 4}", 3, [
@phase.value_error_message("requiredField", "Boolean!", "null")
])
]
)
end
# Nested failures produce one detail line per level (field, then index).
test "Partial object, invalid field type" do
assert_fails_validation(
"""
{
complicatedArgs {
complexArgField(complexArg: {
stringListField: ["one", 2],
requiredField: true,
})
}
}
""",
[],
[
bad_argument(
"complexArg",
"ComplexInput",
~s({stringListField: ["one", 2], requiredField: true}),
3,
[
@phase.value_error_message("stringListField", "[String]", ~s(["one", 2])),
@phase.value_error_message(1, "String", "2")
]
)
]
)
end
test "Partial object, unknown field arg" do
assert_fails_validation(
"""
{
complicatedArgs {
complexArgField(complexArg: {
requiredField: true,
unknownField: "value"
})
}
}
""",
[],
bad_argument(
"complexArg",
"ComplexInput",
~s({requiredField: true, unknownField: "value"}),
3,
[
@phase.unknown_field_error_message("unknownField")
]
)
)
end
end
describe "Directive arguments" do
test "with directives of valid types" do
assert_passes_validation(
"""
{
dog @include(if: true) {
name
}
human @skip(if: false) {
name
}
}
""",
[]
)
end
test "with directive with incorrect types" do
assert_fails_validation(
"""
{
dog @include(if: "yes") {
name @skip(if: ENUM)
}
}
""",
[],
[
bad_argument("if", "Boolean!", ~s("yes"), 2, []),
bad_argument("if", "Boolean!", "ENUM", 3, [])
]
)
end
end
end
# Tests for the ProvidedAnOperation validation phase: a document must contain
# at least one operation; fragment-only or empty documents are rejected.
defmodule Absinthe.Phase.Document.Validation.ProvidedAnOperationTest do
@phase Absinthe.Phase.Document.Validation.ProvidedAnOperation
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.Blueprint
# Expected-error helper: the error is attached to the Blueprint itself, with
# no line information (hence the nil).
defp no_operation do
bad_value(
Blueprint,
@phase.error_message,
nil
)
end
describe "Given an operation" do
test "passes" do
assert_passes_validation(
"""
query Bar {
name
}
""",
[]
)
end
end
describe "When empty" do
test "fails" do
assert_fails_validation(
"",
[],
no_operation()
)
end
end
describe "When given fragments" do
test "fails" do
assert_fails_validation(
"""
fragment Foo on QueryRootType {
name
}
""",
[],
no_operation()
)
end
end
end
# Tests for the NoFragmentCycles validation phase: fragment spreads must not
# form cycles; acyclic fragments are topologically sorted (dependencies first)
# so later phases can process them in order.
#
# Unlike the other validation tests in this file, this module drives the phase
# directly via run/1 rather than through Absinthe.ValidationPhaseCase, because
# it also asserts on the sorted `fragments` list and on the `{:jump, ...}`
# result the phase returns when it finds a cycle.
defmodule Absinthe.Phase.Document.Validation.NoFragmentCyclesTest do
use Absinthe.Case, async: true
alias Absinthe.{Phase, Pipeline}
# Phase under test; referenced both for error_message/2 expectations and for
# invoking the phase in run/1 (kept in one place for consistency).
@phase Absinthe.Phase.Document.Validation.NoFragmentCycles
describe ".run" do
test "should return ok if a fragment does not cycle" do
assert {:ok, _} =
"""
fragment nameFragment on Dog {
name
}
fragment ageFragment on Dog {
age
}
"""
|> run
# Acyclic fragments are reordered so that a fragment always appears before
# any fragment that spreads it, regardless of source order.
test "should sort fragments properly" do
assert {:ok, %{fragments: fragments}} =
"""
fragment nameFragment on Dog {
name
}
fragment ageFragment on Dog {
age
...nameFragment
}
"""
|> run
assert ["nameFragment", "ageFragment"] = fragments |> Enum.map(& &1.name)
assert {:ok, %{fragments: fragments}} =
"""
fragment ageFragment on Dog {
age
...nameFragment
}
fragment nameFragment on Dog {
name
}
"""
|> run
assert ["nameFragment", "ageFragment"] = fragments |> Enum.map(& &1.name)
assert {:ok, %{fragments: fragments}} =
"""
fragment FullType on __Type {
fields {
args {
...InputValue
}
}
}
fragment InputValue on __InputValue {
type { name }
}
"""
|> run
assert ["InputValue", "FullType"] = fragments |> Enum.map(& &1.name)
end
# A cycle makes the phase jump (abort the pipeline) and attach the error to
# the offending fragment(s).
test "should return an error if the named fragment tries to use itself" do
{:jump, blueprint, _} =
"""
fragment nameFragment on Dog {
name
...nameFragment
}
"""
|> run
message = @phase.error_message("nameFragment", ["nameFragment"])
assert Enum.find(blueprint.fragments, fn
%{name: "nameFragment", errors: [%{message: ^message}]} ->
true
_ ->
false
end)
end
test "should add errors to named fragments that form a cycle" do
{:jump, blueprint, _} =
"""
{
dog {
...foo
}
}
fragment foo on Dog {
name
...bar
}
fragment bar on Dog {
barkVolume
...baz
}
fragment baz on Dog {
age
...bar
...quux
}
fragment quux on Dog {
asdf
...foo
}
"""
|> run
quux_msg = @phase.error_message("quux", ~w(quux foo bar baz quux))
baz_msg = @phase.error_message("baz", ~w(baz quux foo bar baz))
assert Enum.find(blueprint.fragments, fn
%{name: "baz", errors: [%{message: ^baz_msg}]} ->
true
_ ->
false
end)
assert Enum.find(blueprint.fragments, fn
%{name: "quux", errors: [%{message: ^quux_msg}]} ->
true
_ ->
false
end)
end
end
# Parses and blueprints the input document, then runs only the phase under
# test. `validation_result_phase: :stub` satisfies the jump-target option the
# phase requires without wiring up a real result phase.
def run(input) do
{:ok, blueprint, _phases} =
input
|> Pipeline.run([
Phase.Parse,
Phase.Blueprint
])
@phase.run(blueprint, validation_result_phase: :stub)
end
end
# Tests for the NoUnusedFragments validation phase: every named fragment must
# be reachable (directly or transitively) from some operation.
defmodule Absinthe.Phase.Document.Validation.NoUnusedFragmentsTest do
@phase Absinthe.Phase.Document.Validation.NoUnusedFragments
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
# The whole module is tagged pending — the phase is not implemented/enabled
# yet, so these tests are excluded from normal runs.
@moduletag :pending
alias Absinthe.Blueprint
# Expected-error helper: a named Fragment node flagged as unused at `line`.
defp unused_fragment(name, line) do
bad_value(
Blueprint.Document.Fragment.Named,
@phase.error_message(name),
line,
name: name
)
end
describe "Validate: No unused fragments" do
test "all fragment names are used" do
assert_passes_validation(
"""
{
human(id: 4) {
...HumanFields1
... on Human {
...HumanFields2
}
}
}
fragment HumanFields1 on Human {
name
...HumanFields3
}
fragment HumanFields2 on Human {
name
}
fragment HumanFields3 on Human {
name
}
""",
[]
)
end
test "all fragment names are used by multiple operations" do
assert_passes_validation(
"""
query Foo {
human(id: 4) {
...HumanFields1
}
}
query Bar {
human(id: 4) {
...HumanFields2
}
}
fragment HumanFields1 on Human {
name
...HumanFields3
}
fragment HumanFields2 on Human {
name
}
fragment HumanFields3 on Human {
name
}
""",
[]
)
end
test "contains unknown fragments" do
assert_fails_validation(
"""
query Foo {
human(id: 4) {
...HumanFields1
}
}
query Bar {
human(id: 4) {
...HumanFields2
}
}
fragment HumanFields1 on Human {
name
...HumanFields3
}
fragment HumanFields2 on Human {
name
}
fragment HumanFields3 on Human {
name
}
fragment Unused1 on Human {
name
}
fragment Unused2 on Human {
name
}
""",
[],
[
unused_fragment("Unused1", 21),
unused_fragment("Unused2", 24)
]
)
end
# Fragments that only reference each other are still unused.
test "contains unknown fragments with ref cycle" do
assert_fails_validation(
"""
query Foo {
human(id: 4) {
...HumanFields1
}
}
query Bar {
human(id: 4) {
...HumanFields2
}
}
fragment HumanFields1 on Human {
name
...HumanFields3
}
fragment HumanFields2 on Human {
name
}
fragment HumanFields3 on Human {
name
}
fragment Unused1 on Human {
name
...Unused2
}
fragment Unused2 on Human {
name
...Unused1
}
""",
[],
[
unused_fragment("Unused1", 21),
unused_fragment("Unused2", 25)
]
)
end
# An undefined spread (`...bar`) is another rule's problem; only the unused
# definition (`foo`) is reported here.
test "contains unknown and undef fragments" do
assert_fails_validation(
"""
query Foo {
human(id: 4) {
...bar
}
}
fragment foo on Human {
name
}
""",
[],
[
unused_fragment("foo", 6)
]
)
end
end
end
# Tests for the UniqueInputFieldNames validation phase: within a single input
# object literal, each field name may appear only once.
defmodule Absinthe.Phase.Document.Validation.UniqueInputFieldNamesTest do
@phase Absinthe.Phase.Document.Validation.UniqueInputFieldNames
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.{Blueprint}
# Expected-error helper: one error per duplicated occurrence, matched by the
# field name *and* its literal value (so each duplicate is distinguished).
defp duplicate(name, line, values) do
List.wrap(values)
|> Enum.map(fn value ->
bad_value(
Blueprint.Input.Field,
@phase.error_message,
line,
literal_value_check(name, value)
)
end)
end
# Builds a matcher for an input Field node with the given name whose
# normalized literal value equals `value`.
defp literal_value_check(name, value) do
fn
%{name: ^name, input_value: %{normalized: %{value: ^value}}} ->
true
_ ->
false
end
end
describe "Validate: Unique input field names" do
test "input object with fields" do
assert_passes_validation(
"""
{
field(arg: { f: true })
}
""",
[]
)
end
test "same input object within two args" do
assert_passes_validation(
"""
{
field(arg1: { f: true }, arg2: { f: true })
}
""",
[]
)
end
test "multiple input object fields" do
assert_passes_validation(
"""
{
field(arg: { f1: "value", f2: "value", f3: "value" })
}
""",
[]
)
end
# Uniqueness is scoped per object literal, so the same name at different
# nesting depths is fine.
test "allows for nested input objects with similar fields" do
assert_passes_validation(
"""
{
field(arg: {
deep: {
deep: {
id: 1
}
id: 1
}
id: 1
})
}
""",
[]
)
end
test "duplicate input object fields" do
assert_fails_validation(
"""
{
field(arg: { f1: "value1", f1: "value2" })
}
""",
[],
duplicate("f1", 2, ~w(value1 value2))
)
end
test "many duplicate input object fields" do
assert_fails_validation(
"""
{
field(arg: { f1: "value1", f1: "value2", f1: "value3" })
}
""",
[],
duplicate("f1", 2, ~w(value1 value2 value3))
)
end
end
end
# Tests for the LoneAnonymousOperation validation phase: an anonymous
# operation is only legal when it is the document's sole operation.
defmodule Absinthe.Phase.Document.Validation.LoneAnonymousOperationTest do
@phase Absinthe.Phase.Document.Validation.LoneAnonymousOperation
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.Blueprint
# Expected-error helper: an Operation node at `line` flagged with the
# phase's message (the literal here must match the phase's output exactly).
defp anon_not_alone(line) do
bad_value(
Blueprint.Document.Operation,
"This anonymous operation must be the only defined operation.",
line
)
end
describe "Validate: Anonymous operation must be alone" do
test "no operations" do
assert_passes_validation(
"""
fragment fragA on Type {
field
}
""",
[]
)
end
test "one anon operation" do
assert_passes_validation(
"""
{
field
}
""",
[]
)
end
test "multiple named operations" do
assert_passes_validation(
"""
query Foo {
field
}
query Bar {
field
}
""",
[]
)
end
test "anon operation with fragment" do
assert_passes_validation(
"""
{
...Foo
}
fragment Foo on Type {
field
}
""",
[]
)
end
# With two anonymous operations, *both* are flagged.
test "multiple anon operations" do
assert_fails_validation(
"""
{
fieldA
}
{
fieldB
}
""",
[],
[
anon_not_alone(1),
anon_not_alone(4)
]
)
end
# When mixed with a named operation, only the anonymous one is flagged.
test "anon operation with a mutation" do
assert_fails_validation(
"""
{
fieldA
}
mutation Foo {
fieldB
}
""",
[],
[
anon_not_alone(1)
]
)
end
test "anon operation with a subscription" do
assert_fails_validation(
"""
{
fieldA
}
subscription Foo {
fieldB
}
""",
[],
[
anon_not_alone(1)
]
)
end
end
end
# Tests for the NoUndefinedVariables validation phase: every variable used in
# an operation (including transitively through fragment spreads) must be
# declared by that operation. Because a fragment can be used by several
# operations, one variable usage can produce one error per operation.
defmodule Absinthe.Phase.Document.Validation.NoUndefinedVariablesTest do
@phase Absinthe.Phase.Document.Validation.NoUndefinedVariables
use Absinthe.ValidationPhaseCase,
phase: @phase,
async: true
alias Absinthe.Blueprint
# Expected-error helper: a Variable node flagged with the phase's message.
# Two source lines are recorded: where the variable is used and where the
# operation that fails to declare it is defined.
defp undefined_variable(name, variable_line, operation_name, operation_line) do
bad_value(
Blueprint.Input.Variable,
@phase.error_message(name, operation_name),
[variable_line, operation_line],
name: name
)
end
describe "Validate: No undefined variables" do
test "all variables defined" do
assert_passes_validation(
"""
query Foo($a: String, $b: String, $c: String) {
field(a: $a, b: $b, c: $c)
}
""",
[]
)
end
test "all variables deeply defined" do
assert_passes_validation(
"""
query Foo($a: String, $b: String, $c: String) {
field(a: $a) {
field(b: $b) {
field(c: $c)
}
}
}
""",
[]
)
end
test "all variables deeply in inline fragments defined" do
assert_passes_validation(
"""
query Foo($a: String, $b: String, $c: String) {
... on Type {
field(a: $a) {
field(b: $b) {
... on Type {
field(c: $c)
}
}
}
}
}
""",
[]
)
end
test "all variables in fragments deeply defined" do
assert_passes_validation(
"""
query Foo($a: String, $b: String, $c: String) {
...FragA
}
fragment FragA on Type {
field(a: $a) {
...FragB
}
}
fragment FragB on Type {
field(b: $b) {
...FragC
}
}
fragment FragC on Type {
field(c: $c)
}
""",
[]
)
end
test "variable within single fragment defined in multiple operations" do
assert_passes_validation(
"""
query Foo($a: String) {
...FragA
}
query Bar($a: String) {
...FragA
}
fragment FragA on Type {
field(a: $a)
}
""",
[]
)
end
test "variable within fragments defined in operations" do
assert_passes_validation(
"""
query Foo($a: String) {
...FragA
}
query Bar($b: String) {
...FragB
}
fragment FragA on Type {
field(a: $a)
}
fragment FragB on Type {
field(b: $b)
}
""",
[]
)
end
# Recursive fragment spreads must not cause infinite traversal.
test "variable within recursive fragment defined" do
assert_passes_validation(
"""
query Foo($a: String) {
...FragA
}
fragment FragA on Type {
field(a: $a) {
...FragA
}
}
""",
[]
)
end
test "variable not defined" do
assert_fails_validation(
"""
query Foo($a: String, $b: String, $c: String) {
field(a: $a, b: $b, c: $c, d: $d)
}
""",
[],
[
undefined_variable("d", 2, "Foo", 1)
]
)
end
# Anonymous operations report a nil operation name.
test "variable not defined by un-named query" do
assert_fails_validation(
"""
{
field(a: $a)
}
""",
[],
[
undefined_variable("a", 2, nil, 1)
]
)
end
test "multiple variables not defined" do
assert_fails_validation(
"""
query Foo($b: String) {
field(a: $a, b: $b, c: $c)
}
""",
[],
[
undefined_variable("a", 2, "Foo", 1),
undefined_variable("c", 2, "Foo", 1)
]
)
end
test "variable in fragment not defined by un-named query" do
assert_fails_validation(
"""
{
...FragA
}
fragment FragA on Type {
field(a: $a)
}
""",
[],
[
undefined_variable("a", 5, nil, 1)
]
)
end
test "variable in fragment not defined by operation" do
assert_fails_validation(
"""
query Foo($a: String, $b: String) {
...FragA
}
fragment FragA on Type {
field(a: $a) {
...FragB
}
}
fragment FragB on Type {
field(b: $b) {
...FragC
}
}
fragment FragC on Type {
field(c: $c)
}
""",
[],
[
undefined_variable("c", 15, "Foo", 1)
]
)
end
test "multiple variables in fragments not defined" do
assert_fails_validation(
"""
query Foo($b: String) {
...FragA
}
fragment FragA on Type {
field(a: $a) {
...FragB
}
}
fragment FragB on Type {
field(b: $b) {
...FragC
}
}
fragment FragC on Type {
field(c: $c)
}
""",
[],
[
undefined_variable("a", 5, "Foo", 1),
undefined_variable("c", 15, "Foo", 1)
]
)
end
# A fragment shared by two operations yields one error per operation that
# fails to declare the variable.
test "single variable in fragment not defined by multiple operations" do
assert_fails_validation(
"""
query Foo($a: String) {
...FragAB
}
query Bar($a: String) {
...FragAB
}
fragment FragAB on Type {
field(a: $a, b: $b)
}
""",
[],
[
undefined_variable("b", 8, "Foo", 1),
undefined_variable("b", 8, "Bar", 4)
]
)
end
test "variables in fragment not defined by multiple operations" do
assert_fails_validation(
"""
query Foo($b: String) {
...FragAB
}
query Bar($a: String) {
...FragAB
}
fragment FragAB on Type {
field(a: $a, b: $b)
}
""",
[],
[
undefined_variable("a", 8, "Foo", 1),
undefined_variable("b", 8, "Bar", 4)
]
)
end
# Declaring a variable in one operation does not make it available to a
# fragment used only by a different operation.
test "variable in fragment used by other operation" do
assert_fails_validation(
"""
query Foo($b: String) {
...FragA
}
query Bar($a: String) {
...FragB
}
fragment FragA on Type {
field(a: $a)
}
fragment FragB on Type {
field(b: $b)
}
""",
[],
[
undefined_variable("a", 8, "Foo", 1),
undefined_variable("b", 11, "Bar", 4)
]
)
end
test "multiple undefined variables produce multiple errors" do
assert_fails_validation(
"""
query Foo($b: String) {
...FragAB
}
query Bar($a: String) {
...FragAB
}
fragment FragAB on Type {
field1(a: $a, b: $b)
...FragC
field3(a: $a, b: $b)
}
fragment FragC on Type {
field2(c: $c)
}
""",
[],
[
undefined_variable("a", 8, "Foo", 1),
undefined_variable("a", 10, "Foo", 1),
undefined_variable("c", 13, "Foo", 1),
undefined_variable("b", 8, "Bar", 4),
undefined_variable("b", 10, "Bar", 4),
undefined_variable("c", 13, "Bar", 4)
]
)
end
end
end
# Validation-phase tests for ProvidedNonNullArguments: every non-null
# (required) argument of a field or directive must be supplied.
# `assert_passes_validation/2`, `assert_fails_validation/3` and `bad_value/4`
# come from Absinthe.ValidationPhaseCase.
defmodule Absinthe.Phase.Document.Validation.ProvidedNonNullArgumentsTest do
  @phase Absinthe.Phase.Document.Validation.ProvidedNonNullArguments

  use Absinthe.ValidationPhaseCase,
    phase: @phase,
    async: true

  alias Absinthe.{Blueprint}

  # Arguments unknown to the schema are another rule's problem; this phase
  # must not flag them.
  test "ignores unknown arguments" do
    assert_passes_validation(
      """
      {
      dog {
      isHousetrained(unknownArgument: true)
      }
      }
      """,
      []
    )
  end

  describe "Valid non-nullable value" do
    test "with a valid non-nullable value: Arg on optional arg" do
      assert_passes_validation(
        """
        {
        dog {
        isHousetrained(atOtherHomes: true)
        }
        }
        """,
        []
      )
    end

    test "with a valid non-nullable value: No Arg on optional arg" do
      assert_passes_validation(
        """
        {
        dog {
        isHousetrained
        }
        }
        """,
        []
      )
    end

    test "with a valid non-nullable value: Multiple args" do
      assert_passes_validation(
        """
        {
        complicatedArgs {
        multipleReqs(req1: 1, req2: 2)
        }
        }
        """,
        []
      )
    end

    # Argument order must not matter.
    test "with a valid non-nullable value: Multiple args reverse order" do
      assert_passes_validation(
        """
        {
        complicatedArgs {
        multipleReqs(req2: 2, req1: 1)
        }
        }
        """,
        []
      )
    end

    test "with a valid non-nullable value: No args on multiple optional" do
      assert_passes_validation(
        """
        {
        complicatedArgs {
        multipleOpts
        }
        }
        """,
        []
      )
    end

    test "with a valid non-nullable value: One arg on multiple optional" do
      assert_passes_validation(
        """
        {
        complicatedArgs {
        multipleOpts(opt1: 1)
        }
        }
        """,
        []
      )
    end

    test "with a valid non-nullable value: Second arg on multiple optional" do
      assert_passes_validation(
        """
        {
        complicatedArgs {
        multipleOpts(opt2: 1)
        }
        }
        """,
        []
      )
    end

    test "with a valid non-nullable value: Multiple reqs on mixedList" do
      assert_passes_validation(
        """
        {
        complicatedArgs {
        multipleOptAndReq(req1: 3, req2: 4)
        }
        }
        """,
        []
      )
    end

    test "with a valid non-nullable value: Multiple reqs and one opt on mixedList" do
      assert_passes_validation(
        """
        {
        complicatedArgs {
        multipleOptAndReq(req1: 3, req2: 4, opt1: 5)
        }
        }
        """,
        []
      )
    end

    test "with a valid non-nullable value: All reqs and opts on mixedList" do
      assert_passes_validation(
        """
        {
        complicatedArgs {
        multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6)
        }
        }
        """,
        []
      )
    end
  end

  describe "Invalid non-nullable value" do
    test "with an invalid non-nullable value: Missing one non-nullable argument" do
      assert_fails_validation(
        """
        {
        complicatedArgs {
        multipleReqs(req2: 2)
        }
        }
        """,
        [],
        bad_value(Blueprint.Input.Argument, @phase.error_message("req1", "Int!"), 3, name: "req1")
      )
    end

    # Supplying a nullable variable for a non-null argument (with no value
    # provided for the variable) still counts as missing.
    test "with an invalid non-nullable value: Missing one non-nullable argument using a variable" do
      assert_fails_validation(
        """
        query WithReq1Blank($value: Int) {
        complicatedArgs {
        multipleReqs(req1: $value, req2: 2)
        }
        }
        """,
        [],
        bad_value(Blueprint.Input.Argument, @phase.error_message("req1", "Int!"), 3, name: "req1")
      )
    end

    test "with an invalid non-nullable value: Missing multiple non-nullable arguments" do
      assert_fails_validation(
        """
        {
        complicatedArgs {
        multipleReqs
        }
        }
        """,
        [],
        [
          bad_value(
            Blueprint.Input.Argument,
            @phase.error_message("req1", "Int!"),
            3,
            name: "req1"
          ),
          bad_value(
            Blueprint.Input.Argument,
            @phase.error_message("req2", "Int!"),
            3,
            name: "req2"
          )
        ]
      )
    end

    # Only the *missing* argument is this phase's concern; req1's bad type
    # belongs to a different validation.
    test "with an invalid non-nullable value: Incorrect value and missing argument" do
      assert_fails_validation(
        """
        {
        complicatedArgs {
        multipleReqs(req1: "one")
        }
        }
        """,
        [],
        bad_value(Blueprint.Input.Argument, @phase.error_message("req2", "Int!"), 3, name: "req2")
      )
    end
  end

  describe "Directive arguments" do
    test "for directive arguments, ignores unknown directives" do
      assert_passes_validation(
        """
        {
        dog @unknown
        }
        """,
        []
      )
    end

    test "for directive arguments, with directives of valid types" do
      assert_passes_validation(
        """
        {
        dog @include(if: true) {
        name
        }
        human @skip(if: false) {
        name
        }
        }
        """,
        []
      )
    end

    # @include and @skip both require their `if: Boolean!` argument.
    test "for directive arguments, with directive with missing types" do
      assert_fails_validation(
        """
        {
        dog @include {
        name @skip
        }
        }
        """,
        [],
        [
          bad_value(
            Blueprint.Input.Argument,
            @phase.error_message("if", "Boolean!"),
            2,
            name: "if"
          ),
          bad_value(
            Blueprint.Input.Argument,
            @phase.error_message("if", "Boolean!"),
            3,
            name: "if"
          )
        ]
      )
    end
  end
end
# Validation-phase tests for VariablesAreInputTypes: variable definitions
# may only use input types (scalars, enums, input objects and wrappers
# thereof), never output types (objects, interfaces, unions).
defmodule Absinthe.Phase.Document.Validation.VariablesAreInputTypesTest do
  @phase Absinthe.Phase.Document.Validation.VariablesAreInputTypes

  use Absinthe.ValidationPhaseCase,
    phase: @phase,
    async: true

  alias Absinthe.Blueprint

  # Builds the expected flag for a variable definition whose type is not an
  # input type. `type_rep` is the printed type reference (e.g. "[[CatOrDog!]]!").
  defp non_input_type(name, type_rep, line) do
    bad_value(
      Blueprint.Document.VariableDefinition,
      @phase.error_message(name, type_rep),
      line,
      name: name
    )
  end

  describe "Validate: Variables are input types" do
    test "input types are valid" do
      assert_passes_validation(
        """
        query Foo($a: String, $b: [Boolean!]!, $c: ComplexInput) {
        field(a: $a, b: $b, c: $c)
        }
        """,
        []
      )
    end

    test "output types are invalid" do
      assert_fails_validation(
        """
        query Foo($a: Dog, $b: [[CatOrDog!]]!, $c: Pet) {
        field(a: $a, b: $b, c: $c)
        }
        """,
        [],
        [
          non_input_type("a", "Dog", 1),
          non_input_type("b", "[[CatOrDog!]]!", 1),
          non_input_type("c", "Pet", 1)
        ]
      )
    end
  end
end
# Validation-phase tests for UniqueArgumentNames: a field or directive may
# not receive the same argument name more than once.
defmodule Absinthe.Phase.Document.Validation.UniqueArgumentNamesTest do
  @phase Absinthe.Phase.Document.Validation.UniqueArgumentNames

  use Absinthe.ValidationPhaseCase,
    phase: @phase,
    async: true

  alias Absinthe.{Blueprint}

  # Builds one expected `bad_value` flag per duplicated literal value.
  # Every occurrence of a duplicated argument (including the first) is
  # expected to be flagged, matched by its literal string value.
  defp duplicate(name, line, values) do
    List.wrap(values)
    |> Enum.map(fn value ->
      bad_value(
        Blueprint.Input.Argument,
        # FIX: zero-arity remote call written with explicit parentheses
        # (Elixir style guide; was `@phase.error_message`).
        @phase.error_message(),
        line,
        literal_value_check(name, value)
      )
    end)
  end

  # Matcher that selects an argument node by name and by the literal value
  # of its normalized input, so duplicates with distinct values can be
  # distinguished in the expected-error list.
  defp literal_value_check(name, value) do
    fn
      %{name: ^name, input_value: %{normalized: %{value: ^value}}} ->
        true

      _ ->
        false
    end
  end

  describe "Validate: Unique argument names" do
    test "no arguments on field" do
      assert_passes_validation(
        """
        {
        field
        }
        """,
        []
      )
    end

    test "no arguments on directive" do
      assert_passes_validation(
        """
        {
        field @directive
        }
        """,
        []
      )
    end

    test "argument on field" do
      assert_passes_validation(
        """
        {
        field(arg: "value")
        }
        """,
        []
      )
    end

    test "argument on directive" do
      assert_passes_validation(
        """
        {
        field @directive(arg: "value")
        }
        """,
        []
      )
    end

    # Uniqueness is scoped per field/directive, not per document.
    test "same argument on two fields" do
      assert_passes_validation(
        """
        {
        one: field(arg: "value")
        two: field(arg: "value")
        }
        """,
        []
      )
    end

    test "same argument on field and directive" do
      assert_passes_validation(
        """
        {
        field(arg: "value") @directive(arg: "value")
        }
        """,
        []
      )
    end

    test "same argument on two directives" do
      assert_passes_validation(
        """
        {
        field @directive1(arg: "value") @directive2(arg: "value")
        }
        """,
        []
      )
    end

    test "multiple field arguments" do
      assert_passes_validation(
        """
        {
        field(arg1: "value", arg2: "value", arg3: "value")
        }
        """,
        []
      )
    end

    test "multiple directive arguments" do
      assert_passes_validation(
        """
        {
        field @directive(arg1: "value", arg2: "value", arg3: "value")
        }
        """,
        []
      )
    end

    test "duplicate field arguments" do
      assert_fails_validation(
        """
        {
        field(arg1: "value1", arg1: "value2")
        }
        """,
        [],
        duplicate("arg1", 2, ~w(value1 value2))
      )
    end

    test "many duplicate field arguments" do
      assert_fails_validation(
        """
        {
        field(arg1: "value1", arg1: "value2", arg1: "value3")
        }
        """,
        [],
        duplicate("arg1", 2, ~w(value1 value2 value3))
      )
    end

    test "duplicate directive arguments" do
      assert_fails_validation(
        """
        {
        field @directive(arg1: "value1", arg1: "value2")
        }
        """,
        [],
        duplicate("arg1", 2, ~w(value1 value2))
      )
    end

    test "many duplicate directive arguments" do
      assert_fails_validation(
        """
        {
        field @directive(arg1: "value1", arg1: "value2", arg1: "value3")
        }
        """,
        [],
        duplicate("arg1", 2, ~w(value1 value2 value3))
      )
    end
  end
end
# Tests for the InlineFunctions schema phase: resolver/middleware functions
# that are literal external captures (`&Mod.fun/3`) get inlined into the
# field's middleware list, while local captures and anonymous functions are
# left behind a shim.
defmodule Absinthe.Phase.Schema.InlineFunctionsTest do
  use ExUnit.Case, async: true

  defmodule Schema do
    use Absinthe.Schema.Notation

    # Fields whose resolvers should end up inlined as
    # `{{Absinthe.Resolution, :call}, &Schema.foo/3}`.
    object :inlined do
      field :direct, :string, resolve: &__MODULE__.foo/3
      # Reaches the external capture through a private helper.
      field :indirect, :string, resolve: indirection()
      # Gets its middleware from the `middleware/3` callback below.
      field :via_callback, :string

      field :complexity_literal, :string do
        complexity 1
      end
    end

    # Fields whose resolvers must NOT be inlined.
    object :not_inlined do
      field :local_capture, :string, resolve: &foo/3
      field :anon_function, :string, resolve: fn _, _, _ -> {:ok, "yo"} end
    end

    def foo(_, _, _), do: {:ok, "hey"}

    defp indirection() do
      &__MODULE__.foo/3
    end

    # Injects foo/3 as middleware for :via_callback on the :inlined object.
    def middleware(_, %{identifier: :via_callback}, %{identifier: :inlined}) do
      [{{Absinthe.Resolution, :call}, &__MODULE__.foo/3}]
    end

    def middleware(middleware, _a, _b) do
      middleware
    end
  end

  setup_all do
    {:ok, %{bp: result()}}
  end

  describe "resolvers and middleware" do
    test "are inlined when they are a literal external function", %{bp: bp} do
      assert [
               {{Absinthe.Resolution, :call}, &Schema.foo/3}
             ] == get_field(bp, :inlined, :direct).middleware

      assert [
               {{Absinthe.Resolution, :call}, &Schema.foo/3}
             ] == get_field(bp, :inlined, :indirect).middleware

      assert [
               {{Absinthe.Resolution, :call}, &Schema.foo/3}
             ] == get_field(bp, :inlined, :via_callback).middleware
    end

    test "aren't inlined if they're a local capture", %{bp: bp} do
      assert [{{Absinthe.Middleware, :shim}, _}] =
               get_field(bp, :not_inlined, :local_capture).middleware
    end
  end

  describe "complexity" do
    test "is inlined when it's a literal", %{bp: bp} do
      assert 1 == get_field(bp, :inlined, :complexity_literal).complexity
    end
  end

  # Looks up a field struct on an object in the blueprint's built type
  # artifacts.
  defp get_field(%{schema_definitions: [schema]}, object, field) do
    object = Enum.find(schema.type_artifacts, fn t -> t.identifier == object end)
    Map.fetch!(object.fields, field)
  end

  # Runs the schema pipeline up to (and including) the InlineFunctions
  # phase and returns the resulting blueprint.
  def result() do
    assert {:ok, bp, _} = Absinthe.Pipeline.run(Schema.__absinthe_blueprint__(), pipeline())
    bp
  end

  def pipeline() do
    Schema
    |> Absinthe.Pipeline.for_schema()
    |> Absinthe.Pipeline.from(Absinthe.Phase.Schema.Build)
    |> Absinthe.Pipeline.upto(Absinthe.Phase.Schema.InlineFunctions)
  end
end
# Execution tests for non-null handling: returning nil (or an error) from a
# non-null field must null out the nearest nullable ancestor and report
# "Cannot return null for non-nullable field" at the offending path.
defmodule Absinthe.Phase.Document.Execution.NonNullTest do
  use ExUnit.Case, async: true

  defmodule Schema do
    use Absinthe.Schema

    # Returns nil when `make_null: true` is passed, so tests can force a
    # null out of an otherwise non-null field.
    defp thing_resolver(_, %{make_null: make_null}, _) do
      if make_null do
        {:ok, nil}
      else
        {:ok, %{}}
      end
    end

    defp thing_resolver(_, _, _) do
      {:ok, %{}}
    end

    object :thing do
      field :nullable, :thing do
        arg :make_null, :boolean
        resolve &thing_resolver/3
      end

      @desc """
      A field declared to be non null.
      It accepts an argument for testing that can be used to make it return null,
      testing the null handling behaviour.
      """
      field :non_null, non_null(:thing) do
        arg :make_null, :boolean
        resolve &thing_resolver/3
      end

      field :non_null_error_field, non_null(:string) do
        resolve fn _, _ ->
          {:error, "boom"}
        end
      end
    end

    query do
      field :nullable, :thing do
        arg :make_null, :boolean
        resolve &thing_resolver/3
      end

      field :non_null_error_field, non_null(:string) do
        resolve fn _, _ ->
          {:error, "boom"}
        end
      end

      field :nullable_list_of_nullable, list_of(:thing) do
        resolve fn _, _ ->
          {:ok, [%{}]}
        end
      end

      field :nullable_list_of_non_null, list_of(non_null(:thing)) do
        resolve fn _, _ ->
          {:ok, [%{}]}
        end
      end

      @desc """
      A field declared to be non null.
      It accepts an argument for testing that can be used to make it return null,
      testing the null handling behaviour.
      """
      field :non_null, non_null(:thing) do
        arg :make_null, :boolean
        resolve &thing_resolver/3
      end
    end
  end

  test "getting a null value normally works fine" do
    doc = """
    {
    nullable { nullable(makeNull: true) { __typename }}
    }
    """

    assert {:ok, %{data: %{"nullable" => %{"nullable" => nil}}}} == Absinthe.run(doc, Schema)
  end

  test "returning nil from a non null field makes the parent nullable null" do
    doc = """
    {
    nullable { nullable { nonNull(makeNull: true) { __typename }}}
    }
    """

    data = %{"nullable" => %{"nullable" => nil}}

    errors = [
      %{
        locations: [%{column: 25, line: 2}],
        message: "Cannot return null for non-nullable field",
        path: ["nullable", "nullable", "nonNull"]
      }
    ]

    assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema)
  end

  # FIX: test name typo corrected ("propogation" -> "propagation").
  test "error propagation to root field returns nil on data" do
    doc = """
    {
    nullable { nullable { nonNullErrorField }}
    }
    """

    data = %{"nullable" => %{"nullable" => nil}}

    # Both the null-violation error and the resolver's own error surface.
    errors = [
      %{
        locations: [%{column: 25, line: 2}],
        message: "Cannot return null for non-nullable field",
        path: ["nullable", "nullable", "nonNullErrorField"]
      },
      %{
        locations: [%{column: 25, line: 2}],
        message: "boom",
        path: ["nullable", "nullable", "nonNullErrorField"]
      }
    ]

    assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema)
  end

  test "returning an error from a non null field makes the parent nullable null" do
    doc = """
    {
    nonNull { nonNull { nonNullErrorField }}
    }
    """

    result = Absinthe.run(doc, Schema)

    errors = [
      %{
        locations: [%{column: 23, line: 2}],
        message: "Cannot return null for non-nullable field",
        path: ["nonNull", "nonNull", "nonNullErrorField"]
      },
      %{
        locations: [%{column: 23, line: 2}],
        message: "boom",
        path: ["nonNull", "nonNull", "nonNullErrorField"]
      }
    ]

    # The root field itself is non-null, so the whole data is nulled.
    assert {:ok, %{data: nil, errors: errors}} == result
  end

  test "returning an error from a non null field makes the parent nullable null at arbitrary depth" do
    doc = """
    {
    nullable { nonNull { nonNull { nonNull { nonNull { nonNullErrorField }}}}}
    }
    """

    data = %{"nullable" => nil}
    path = ["nullable", "nonNull", "nonNull", "nonNull", "nonNull", "nonNullErrorField"]

    errors = [
      %{
        locations: [%{column: 54, line: 2}],
        message: "Cannot return null for non-nullable field",
        path: path
      },
      %{locations: [%{column: 54, line: 2}], message: "boom", path: path}
    ]

    assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema)
  end

  describe "lists" do
    test "list of nullable things works when child has a null violation" do
      doc = """
      {
      nullableListOfNullable { nonNull(makeNull: true) { __typename } }
      }
      """

      # Only the offending list entry becomes nil.
      data = %{"nullableListOfNullable" => [nil]}

      errors = [
        %{
          locations: [%{column: 28, line: 2}],
          message: "Cannot return null for non-nullable field",
          path: ["nullableListOfNullable", 0, "nonNull"]
        }
      ]

      assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema)
    end

    @tag :syntax
    test "list of non null things works when child has a null violation" do
      doc = """
      {
      nullableListOfNonNull { nonNull(makeNull: true) { __typename } }
      }
      """

      # A non-null list entry cannot be nil, so the whole list collapses.
      data = %{"nullableListOfNonNull" => nil}

      errors = [
        %{
          locations: [%{column: 27, line: 2}],
          message: "Cannot return null for non-nullable field",
          path: ["nullableListOfNonNull", 0, "nonNull"]
        }
      ]

      assert {:ok, %{data: data, errors: errors}} == Absinthe.run(doc, Schema)
    end
  end
end
# Validation-phase tests for KnownDirectives: directives must exist in the
# schema and may only appear in the locations they are declared for.
defmodule Absinthe.Phase.Validation.KnownDirectivesTest do
  @phase Absinthe.Phase.Validation.KnownDirectives

  use Absinthe.ValidationPhaseCase,
    phase: @phase,
    async: true

  alias Absinthe.{Blueprint}

  # Expected flag for a directive the schema does not define.
  def unknown_directive(name, line) do
    bad_value(
      Blueprint.Directive,
      "Unknown directive.",
      line,
      name: name
    )
  end

  # Expected flag for a known directive used in a disallowed location
  # (`placement` is the GraphQL directive-location name, e.g. "QUERY").
  def misplaced_directive(name, placement, line) do
    bad_value(
      Blueprint.Directive,
      "May not be used on #{placement}.",
      line,
      name: name
    )
  end

  test "with no directives" do
    assert_passes_validation(
      """
      query Foo {
      name
      ...Frag
      }
      fragment Frag on Dog {
      name
      }
      """,
      []
    )
  end

  test "with known directives" do
    assert_passes_validation(
      """
      {
      dog @include(if: true) {
      name
      }
      human @skip(if: false) {
      name
      }
      }
      """,
      []
    )
  end

  test "with unknown directive" do
    assert_fails_validation(
      """
      {
      dog @unknown(directive: "value") {
      name
      }
      }
      """,
      [],
      [
        unknown_directive("unknown", 2)
      ]
    )
  end

  test "with many unknown directives" do
    assert_fails_validation(
      """
      {
      dog @unknown(directive: "value") {
      name
      }
      human @unknown(directive: "value") {
      name
      pets @unknown(directive: "value") {
      name
      }
      }
      }
      """,
      [],
      [
        unknown_directive("unknown", 2),
        unknown_directive("unknown", 5),
        unknown_directive("unknown", 7)
      ]
    )
  end

  test "with well placed directives" do
    assert_passes_validation(
      """
      query Foo @onQuery {
      name @include(if: true)
      ...Frag @include(if: true)
      skippedField @skip(if: true)
      ...SkippedFrag @skip(if: true)
      }
      mutation Bar @onMutation {
      someField
      }
      """,
      []
    )
  end

  test "with misplaced directives" do
    assert_fails_validation(
      """
      query Foo @include(if: true) {
      name @onQuery
      ...Frag @onQuery
      }
      mutation Bar @onQuery {
      someField
      }
      """,
      [],
      [
        misplaced_directive("include", "QUERY", 1),
        misplaced_directive("onQuery", "FIELD", 2),
        misplaced_directive("onQuery", "FRAGMENT_SPREAD", 3),
        misplaced_directive("onQuery", "MUTATION", 6)
      ]
    )
  end

  # Same rule applied to SDL (schema language) documents; both tests are
  # pending schema-validation support.
  describe "within schema language" do
    @tag :pending_schema
    test "with well placed directives" do
      assert_passes_validation(
        """
        type MyObj implements MyInterface @onObject {
        myField(myArg: Int @onArgumentDefinition): String @onFieldDefinition
        }
        scalar MyScalar @onScalar
        interface MyInterface @onInterface {
        myField(myArg: Int @onArgumentDefinition): String @onFieldDefinition
        }
        union MyUnion @onUnion = MyObj | Other
        enum MyEnum @onEnum {
        MY_VALUE @onEnumValue
        }
        input MyInput @onInputObject {
        myField: Int @onInputFieldDefinition
        }
        schema @onSchema {
        query: MyQuery
        }
        """,
        :schema
      )
    end

    @tag :pending_schema
    test "with misplaced directives" do
      assert_fails_validation(
        """
        type MyObj implements MyInterface @onInterface {
        myField(myArg: Int @onInputFieldDefinition): String @onInputFieldDefinition
        }
        scalar MyScalar @onEnum
        interface MyInterface @onObject {
        myField(myArg: Int @onInputFieldDefinition): String @onInputFieldDefinition
        }
        union MyUnion @onEnumValue = MyObj | Other
        enum MyEnum @onScalar {
        MY_VALUE @onUnion
        }
        input MyInput @onEnum {
        myField: Int @onArgumentDefinition
        }
        schema @onObject {
        query: MyQuery
        }
        """,
        :schema,
        [
          misplaced_directive("onInterface", "OBJECT", 1),
          misplaced_directive("onInputFieldDefinition", "ARGUMENT_DEFINITION", 2),
          misplaced_directive("onInputFieldDefinition", "FIELD_DEFINITION", 2),
          misplaced_directive("onEnum", "SCALAR", 5),
          misplaced_directive("onObject", "INTERFACE", 7),
          misplaced_directive("onInputFieldDefinition", "ARGUMENT_DEFINITION", 8),
          misplaced_directive("onInputFieldDefinition", "FIELD_DEFINITION", 8),
          misplaced_directive("onEnumValue", "UNION", 11),
          misplaced_directive("onScalar", "ENUM", 13),
          misplaced_directive("onUnion", "ENUM_VALUE", 14),
          misplaced_directive("onEnum", "INPUT_OBJECT", 17),
          misplaced_directive("onArgumentDefinition", "INPUT_FIELD_DEFINITION", 18),
          misplaced_directive("onObject", "SCHEMA", 21)
        ]
      )
    end
  end
end
# Validation-phase tests for KnownTypeNames: every type referenced by a
# variable definition or a fragment type condition must exist in the schema.
defmodule Absinthe.Phase.Validation.KnownTypeNamesTest do
  @phase Absinthe.Phase.Validation.KnownTypeNames

  use Absinthe.ValidationPhaseCase,
    phase: @phase,
    async: true

  alias Absinthe.Blueprint

  # Expected flag for a variable definition whose (unwrapped) type name is
  # unknown.
  def unknown_type(:variable_definition, name, line) do
    bad_value(
      Blueprint.Document.VariableDefinition,
      error_message(name),
      line,
      &(Blueprint.TypeReference.unwrap(&1.type).name == name)
    )
  end

  def unknown_type(:named_type_condition, name, line) do
    unknown_type_condition(Blueprint.Document.Fragment.Named, name, line)
  end

  def unknown_type(:spread_type_condition, name, line) do
    unknown_type_condition(Blueprint.Document.Fragment.Spread, name, line)
  end

  def unknown_type(:inline_type_condition, name, line) do
    unknown_type_condition(Blueprint.Document.Fragment.Inline, name, line)
  end

  # Expected flag for a fragment node (of `node_type`) whose type condition
  # names an unknown type.
  def unknown_type_condition(node_type, name, line) do
    bad_value(
      node_type,
      error_message(name),
      line,
      &(&1.type_condition && Blueprint.TypeReference.unwrap(&1.type_condition).name == name)
    )
  end

  def error_message(type) do
    ~s(Unknown type "#{type}".)
  end

  describe "Validate: Known type names" do
    test "known type names are valid" do
      assert_passes_validation(
        """
        query Foo($var: String, $required: [String!]!) {
        user(id: 4) {
        pets { ... on Pet { name }, ...PetFields, ... { name } }
        }
        }
        fragment PetFields on Pet {
        name
        }
        """,
        []
      )
    end

    test "unknown type names are invalid" do
      assert_fails_validation(
        """
        query Foo($var: JumbledUpLetters) {
        user(id: 4) {
        name
        pets { ... on Badger { name }, ...PetFields }
        }
        }
        fragment PetFields on Peettt {
        name
        }
        """,
        [],
        [
          unknown_type(:variable_definition, "JumbledUpLetters", 1),
          unknown_type(:inline_type_condition, "Badger", 4),
          unknown_type(:named_type_condition, "Peettt", 7)
        ]
      )
    end

    # Type definitions embedded in the document are not checked against the
    # schema; only executable-document references (the variable definition
    # on line 11) are.
    test "ignores type definitions" do
      assert_fails_validation(
        """
        type NotInTheSchema {
        field: FooBar
        }
        interface FooBar {
        field: NotInTheSchema
        }
        union U = A | B
        input Blob {
        field: UnknownType
        }
        query Foo($var: NotInTheSchema) {
        user(id: $var) {
        id
        }
        }
        """,
        [],
        unknown_type(:variable_definition, "NotInTheSchema", 11)
      )
    end
  end
end
# Tests for Absinthe.Schema: built-in type loading, type imports across
# schemas, type lookup, used/introspection type sets, root-object naming,
# fragment spreads, and type/field metadata.
defmodule Absinthe.SchemaTest do
  use Absinthe.Case, async: true

  alias Absinthe.Schema
  alias Absinthe.Type

  describe "built-in types" do
    def load_valid_schema do
      load_schema("valid_schema")
    end

    test "are loaded" do
      load_valid_schema()

      builtin_types =
        Absinthe.Fixtures.ValidSchema
        |> Absinthe.Schema.types()
        |> Enum.filter(&Absinthe.Type.built_in?(&1))

      # FIX: idiomatic emptiness check (was `length(builtin_types) > 0`,
      # which is O(n)).
      assert builtin_types != []

      # Each built-in type is reachable by identifier and by name alike.
      Enum.each(builtin_types, fn type ->
        assert Absinthe.Fixtures.ValidSchema.__absinthe_type__(type.identifier) ==
                 Absinthe.Fixtures.ValidSchema.__absinthe_type__(type.name)
      end)

      int = Absinthe.Fixtures.ValidSchema.__absinthe_type__(:integer)
      assert 1 == Type.Scalar.serialize(int, 1)
      assert {:ok, 1} == Type.Scalar.parse(int, 1, %{})
    end
  end

  describe "using the same identifier" do
    @tag :pending_schema
    test "raises an exception" do
      assert_schema_error("schema_with_duplicate_identifiers", [
        %{
          rule: Absinthe.Schema.Rule.TypeNamesAreUnique,
          data: %{artifact: "Absinthe type identifier", value: :person}
        }
      ])
    end
  end

  describe "using the same name" do
    def load_duplicate_name_schema do
      load_schema("schema_with_duplicate_names")
    end

    @tag :pending_schema
    test "raises an exception" do
      assert_schema_error("schema_with_duplicate_names", [
        %{
          rule: Absinthe.Schema.Rule.TypeNamesAreUnique,
          data: %{artifact: "Type name", value: "Person"}
        }
      ])
    end
  end

  defmodule SourceSchema do
    use Absinthe.Schema

    @desc "can describe query"
    query do
      field :foo,
        type: :foo,
        resolve: fn _, _ -> {:ok, %{name: "Fancy Foo!"}} end
    end

    object :foo do
      field :name, :string
    end
  end

  defmodule UserSchema do
    use Absinthe.Schema

    import_types SourceSchema

    query do
      field :foo,
        type: :foo,
        resolve: fn _, _ -> {:ok, %{name: "A different fancy Foo!"}} end

      field :bar,
        type: :bar,
        resolve: fn _, _ -> {:ok, %{name: "A plain old bar"}} end
    end

    object :bar do
      field :name, :string
    end
  end

  # Exercises transitive imports (via UserSchema) plus types reachable only
  # through interfaces, unions, and directives.
  defmodule ThirdSchema do
    use Absinthe.Schema

    interface :named do
      field :name, :string
      resolve_type fn _, _ -> nil end
    end

    interface :aged do
      field :age, :integer
      resolve_type fn _, _ -> nil end
    end

    union :pet do
      types [:dog]
    end

    object :dog do
      field :name, :string
    end

    enum :some_enum do
      values([:a, :b])
    end

    interface :loop do
      field :loop, :loop
    end

    directive :directive do
      arg :baz, :dir_enum
    end

    enum :dir_enum do
      value :foo
    end

    query do
      field :loop, :loop
      field :enum_field, :some_enum
      field :object_field, :user
      field :interface_field, :aged
      field :union_field, :pet
    end

    object :person do
      field :age, :integer
      interface :aged
    end

    import_types UserSchema

    object :user do
      field :name, :string
      interface :named
    end

    object :baz do
      field :name, :string
    end
  end

  test "can have a description on the root query" do
    assert "can describe query" == Absinthe.Schema.lookup_type(SourceSchema, :query).description
  end

  describe "using import_types" do
    test "adds the types from a parent" do
      assert %{foo: "Foo", bar: "Bar"} = UserSchema.__absinthe_types__()
      assert "Foo" == UserSchema.__absinthe_type__(:foo).name
    end

    test "adds the types from a grandparent" do
      assert %{foo: "Foo", bar: "Bar", baz: "Baz"} = ThirdSchema.__absinthe_types__()
      assert "Foo" == ThirdSchema.__absinthe_type__(:foo).name
    end
  end

  describe "lookup_type" do
    test "is supported" do
      assert "Foo" == Schema.lookup_type(ThirdSchema, :foo).name
    end
  end

  defmodule RootsSchema do
    use Absinthe.Schema

    import_types SourceSchema

    query do
      field :name,
        type: :string,
        args: [
          family_name: [type: :boolean]
        ]
    end

    mutation name: "MyRootMutation" do
      field :name, :string
    end

    subscription name: "RootSubscriptionTypeThing" do
      field :name, :string
    end
  end

  describe "used_types" do
    test "does not contain introspection types" do
      # FIX: `refute` instead of `assert !...` (ExUnit idiom).
      refute Enum.any?(
               Schema.used_types(ThirdSchema),
               &Type.introspection?/1
             )
    end

    test "contains enums" do
      types =
        ThirdSchema
        |> Absinthe.Schema.used_types()
        |> Enum.map(& &1.identifier)

      assert :some_enum in types
      assert :dir_enum in types
    end

    test "contains interfaces" do
      types =
        ThirdSchema
        |> Absinthe.Schema.used_types()
        |> Enum.map(& &1.identifier)

      assert :named in types
    end

    test "contains types only connected via interfaces" do
      types =
        ThirdSchema
        |> Absinthe.Schema.used_types()
        |> Enum.map(& &1.identifier)

      assert :person in types
    end

    test "contains types only connected via union" do
      types =
        ThirdSchema
        |> Absinthe.Schema.used_types()
        |> Enum.map(& &1.identifier)

      assert :dog in types
    end
  end

  describe "introspection_types" do
    test "is not empty" do
      # FIX: `refute` instead of `assert !...` (ExUnit idiom).
      refute Enum.empty?(Schema.introspection_types(ThirdSchema))
    end

    test "are introspection types" do
      assert Enum.all?(
               Schema.introspection_types(ThirdSchema),
               &Type.introspection?/1
             )
    end
  end

  describe "root fields" do
    test "can have a default name" do
      assert "RootQueryType" == Schema.lookup_type(RootsSchema, :query).name
    end

    test "can have a custom name" do
      assert "MyRootMutation" == Schema.lookup_type(RootsSchema, :mutation).name
    end

    test "supports subscriptions" do
      assert "RootSubscriptionTypeThing" == Schema.lookup_type(RootsSchema, :subscription).name
    end
  end

  describe "fields" do
    test "have the correct structure in query" do
      assert %Type.Field{name: "name"} = Schema.lookup_type(RootsSchema, :query).fields.name
    end

    test "have the correct structure in subscription" do
      assert %Type.Field{name: "name"} =
               Schema.lookup_type(RootsSchema, :subscription).fields.name
    end
  end

  describe "arguments" do
    test "have the correct structure" do
      assert %Type.Argument{name: "family_name"} =
               Schema.lookup_type(RootsSchema, :query).fields.name.args.family_name
    end
  end

  defmodule FragmentSpreadSchema do
    use Absinthe.Schema

    @viewer %{id: "ABCD", name: "Bruce"}

    query do
      field :viewer, :viewer do
        resolve fn _, _ -> {:ok, @viewer} end
      end
    end

    object :viewer do
      field :id, :id
      field :name, :string
    end
  end

  describe "multiple fragment spreads" do
    @query """
    query Viewer{viewer{id,...F1}}
    fragment F0 on Viewer{name,id}
    fragment F1 on Viewer{id,...F0}
    """

    test "builds the correct result" do
      assert_result(
        {:ok, %{data: %{"viewer" => %{"id" => "ABCD", "name" => "Bruce"}}}},
        run(@query, FragmentSpreadSchema)
      )
    end
  end

  # Exercises `meta` on every type class (object, input object, enum,
  # scalar, interface, union) and on fields.
  defmodule MetadataSchema do
    use Absinthe.Schema

    query do
      # Query type must exist
    end

    object :foo, meta: [foo: "bar"] do
      meta :sql_table, "foos"
      meta cache: false, eager: true

      field :bar, :string do
        meta :nice, "yup"
      end
    end

    input_object :input_foo do
      meta :is_input, true

      field :bar, :string do
        meta :nice, "nope"
      end
    end

    enum :color do
      meta :rgb_only, true
      value :red
      value :blue
      value :green
    end

    scalar :my_scalar do
      meta :is_scalar, true
      # Missing parse and serialize
    end

    interface :named do
      meta :is_interface, true

      field :name, :string do
        meta :is_name, true
      end
    end

    union :result do
      types [:foo]
      meta :is_union, true
    end
  end

  describe "can add metadata to an object" do
    @tag :wip
    test "sets object metadata" do
      foo = Schema.lookup_type(MetadataSchema, :foo)

      # Sorted comparison: meta declared via option and via `meta` calls is
      # merged without a guaranteed order.
      assert Enum.sort(eager: true, cache: false, sql_table: "foos", foo: "bar") ==
               Enum.sort(foo.__private__[:meta])

      assert Type.meta(foo, :sql_table) == "foos"
      assert Type.meta(foo, :cache) == false
      assert Type.meta(foo, :eager) == true
    end

    test "sets field metadata" do
      foo = Schema.lookup_type(MetadataSchema, :foo)
      assert %{__private__: [meta: [nice: "yup"]]} = foo.fields[:bar]
      assert Type.meta(foo.fields[:bar], :nice) == "yup"
    end

    test "sets input object metadata" do
      input_foo = Schema.lookup_type(MetadataSchema, :input_foo)
      assert %{__private__: [meta: [is_input: true]]} = input_foo
      assert Type.meta(input_foo, :is_input) == true
    end

    test "sets input object field metadata" do
      input_foo = Schema.lookup_type(MetadataSchema, :input_foo)
      assert %{__private__: [meta: [nice: "nope"]]} = input_foo.fields[:bar]
      assert Type.meta(input_foo.fields[:bar], :nice) == "nope"
    end

    test "sets enum metadata" do
      color = Schema.lookup_type(MetadataSchema, :color)
      assert %{__private__: [meta: [rgb_only: true]]} = color
      assert Type.meta(color, :rgb_only) == true
    end

    test "sets scalar metadata" do
      my_scalar = Schema.lookup_type(MetadataSchema, :my_scalar)
      assert %{__private__: [meta: [is_scalar: true]]} = my_scalar
      assert Type.meta(my_scalar, :is_scalar) == true
    end

    test "sets interface metadata" do
      named = Schema.lookup_type(MetadataSchema, :named)
      assert %{__private__: [meta: [is_interface: true]]} = named
      assert Type.meta(named, :is_interface) == true
    end

    test "sets interface field metadata" do
      named = Schema.lookup_type(MetadataSchema, :named)
      assert %{__private__: [meta: [is_name: true]]} = named.fields[:name]
      assert Type.meta(named.fields[:name], :is_name) == true
    end

    test "sets union metadata" do
      result = Schema.lookup_type(MetadataSchema, :result)
      assert %{__private__: [meta: [is_union: true]]} = result
      assert Type.meta(result, :is_union) == true
    end
  end
end
defmodule Absinthe.IntrospectionTest do
use Absinthe.Case, async: true
alias Absinthe.Schema
describe "introspection of an enum type" do
test "can use __type and value information with deprecations" do
result =
"""
{
__type(name: "Channel") {
kind
name
description
enumValues(includeDeprecated: true) {
name
description
isDeprecated
deprecationReason
}
}
}
"""
|> run(Absinthe.Fixtures.ColorSchema)
assert {:ok,
%{
data: %{
"__type" => %{
"name" => "Channel",
"description" => "A color channel",
"kind" => "ENUM",
"enumValues" => values
}
}
}} = result
assert [
%{
"name" => "BLUE",
"description" => "The color blue",
"isDeprecated" => false,
"deprecationReason" => nil
},
%{
"name" => "GREEN",
"description" => "The color green",
"isDeprecated" => false,
"deprecationReason" => nil
},
%{
"name" => "PUCE",
"description" => "The color puce",
"isDeprecated" => true,
"deprecationReason" => "it's ugly"
},
%{
"name" => "RED",
"description" => "The color red",
"isDeprecated" => false,
"deprecationReason" => nil
}
] == values |> Enum.sort_by(& &1["name"])
end
test "can use __type and value information without deprecations" do
result =
"""
{
__type(name: "Channel") {
kind
name
description
enumValues {
name
description
}
}
}
"""
|> run(Absinthe.Fixtures.ColorSchema)
assert {:ok,
%{
data: %{
"__type" => %{
"name" => "Channel",
"description" => "A color channel",
"kind" => "ENUM",
"enumValues" => values
}
}
}} = result
assert [
%{"name" => "BLUE", "description" => "The color blue"},
%{"name" => "GREEN", "description" => "The color green"},
%{"name" => "RED", "description" => "The color red"}
] == values |> Enum.sort_by(& &1["name"])
end
test "when used as the defaultValue of an argument" do
result =
"""
{
__schema {
queryType {
fields {
name
type {
name
}
args {
name
defaultValue
}
}
}
}
}
"""
|> run(Absinthe.Fixtures.ColorSchema)
assert {:ok, %{data: %{"__schema" => %{"queryType" => %{"fields" => fields}}}}} = result
assert [
%{"name" => "info", "args" => [%{"name" => "channel", "defaultValue" => "RED"}]}
] = fields
end
end
describe "introspection of an input object type" do
test "can use __type and ignore deprecated fields" do
result =
"""
{
__type(name: "ProfileInput") {
kind
name
description
inputFields {
name
description
type {
kind
name
ofType {
kind
name
}
}
defaultValue
}
}
}
"""
|> run(Absinthe.Fixtures.ContactSchema)
assert_result(
{:ok,
%{
data: %{
"__type" => %{
"description" => "The basic details for a person",
"inputFields" => [
%{
"defaultValue" => "43",
"description" => "The person's age",
"name" => "age",
"type" => %{"kind" => "SCALAR", "name" => "Int", "ofType" => nil}
},
%{
"defaultValue" => nil,
"description" => nil,
"name" => "code",
"type" => %{
"kind" => "NON_NULL",
"name" => nil,
"ofType" => %{"kind" => "SCALAR", "name" => "String"}
}
},
%{
"defaultValue" => "\"Janet\"",
"description" => "The person's name",
"name" => "name",
"type" => %{"kind" => "SCALAR", "name" => "String", "ofType" => nil}
}
],
"kind" => "INPUT_OBJECT",
"name" => "ProfileInput"
}
}
}},
result
)
assert !match?({:ok, %{data: %{"__type" => %{"fields" => _}}}}, result)
end
end
  # Exercises `__type` introspection against object types: deprecated-field
  # filtering, interface listing, and object fields whose names collide with
  # introspection vocabulary ("kind").
  describe "introspection of an object type" do
    test "can use __type and ignore deprecated fields" do
      result =
        """
        {
          __type(name: "Person") {
            kind
            name
            description
            fields {
              name
            }
          }
        }
        """
        |> run(Absinthe.Fixtures.ContactSchema)

      # Deprecated fields (e.g. "address") are omitted when
      # includeDeprecated is not requested.
      assert_result(
        {:ok,
         %{
           data: %{
             "__type" => %{
               "name" => "Person",
               "description" => "A person",
               "kind" => "OBJECT",
               "fields" => [%{"name" => "age"}, %{"name" => "name"}, %{"name" => "others"}]
             }
           }
         }},
        result
      )
    end

    test "can use __type and include deprecated fields" do
      result =
        """
        {
          __type(name: "Person") {
            kind
            name
            description
            fields(includeDeprecated: true) {
              name
              isDeprecated
              deprecationReason
            }
          }
        }
        """
        |> run(Absinthe.Fixtures.ContactSchema)

      # With includeDeprecated: true the deprecated "address" field appears,
      # carrying its deprecation reason.
      assert_result(
        {:ok,
         %{
           data: %{
             "__type" => %{
               "description" => "A person",
               "fields" => [
                 %{
                   "deprecationReason" => "change of privacy policy",
                   "isDeprecated" => true,
                   "name" => "address"
                 },
                 %{"deprecationReason" => nil, "isDeprecated" => false, "name" => "age"},
                 %{"deprecationReason" => nil, "isDeprecated" => false, "name" => "name"},
                 %{"deprecationReason" => nil, "isDeprecated" => false, "name" => "others"}
               ],
               "kind" => "OBJECT",
               "name" => "Person"
             }
           }
         }},
        result
      )
    end

    test "can use __type to view interfaces" do
      result =
        """
        {
          __type(name: "Person") {
            interfaces {
              name
            }
          }
        }
        """
        |> run(Absinthe.Fixtures.ContactSchema)

      assert_result(
        {:ok, %{data: %{"__type" => %{"interfaces" => [%{"name" => "NamedEntity"}]}}}},
        result
      )
    end

    # Minimal schema whose object has a field literally named "kind", to make
    # sure a user field does not clash with the introspection field of the
    # same name.
    defmodule KindSchema do
      use Absinthe.Schema

      query do
        field :foo, :foo
      end

      object :foo do
        field :name, :string
        field :kind, :string
      end
    end

    test "can use __type with a field named 'kind'" do
      result =
        """
        {
          __type(name: "Foo") {
            name
            fields {
              name
              type {
                name
                kind
              }
            }
          }
        }
        """
        |> run(KindSchema)

      assert {:ok,
              %{
                data: %{
                  "__type" => %{
                    "fields" => [
                      %{"name" => "kind", "type" => %{"kind" => "SCALAR", "name" => "String"}},
                      %{"name" => "name", "type" => %{"kind" => "SCALAR", "name" => "String"}}
                    ],
                    "name" => "Foo"
                  }
                }
              }} = result
    end

    test "can use __schema with a field named 'kind'" do
      result =
        """
        {
          __schema {
            queryType {
              fields {
                name
                type {
                  name
                  kind
                }
              }
            }
          }
        }
        """
        |> run(KindSchema)

      assert {:ok,
              %{
                data: %{
                  "__schema" => %{
                    "queryType" => %{
                      "fields" => [
                        %{"name" => "foo", "type" => %{"name" => "Foo", "kind" => "OBJECT"}}
                      ]
                    }
                  }
                }
              }} = result
    end
  end
defmodule MySchema do
use Absinthe.Schema
query do
field :greeting,
type: :string,
description: "A traditional greeting",
resolve: fn _, _ -> {:ok, "Hah!"} end
end
end
  # Scalars have no sub-fields: requesting `fields` on a SCALAR type must
  # resolve to nil rather than an error.
  describe "introspection of a scalar type" do
    test "can use __type" do
      result =
        """
        {
          __type(name: "String") {
            kind
            name
            description,
            fields {
              name
            }
          }
        }
        """
        |> run(MySchema)

      # Compare against the scalar definition actually registered in the
      # schema, rather than hard-coding name/description.
      string = Schema.lookup_type(MySchema, :string)

      assert_result(
        {:ok,
         %{
           data: %{
             "__type" => %{
               "name" => string.name,
               "description" => string.description,
               "kind" => "SCALAR",
               "fields" => nil
             }
           }
         }},
        result
      )
    end
  end
  # A UNION type exposes its member object types via `possibleTypes`.
  describe "introspection of a union type" do
    test "can use __type and get possible types" do
      result =
        """
        {
          __type(name: "SearchResult") {
            kind
            name
            description
            possibleTypes {
              name
            }
          }
        }
        """
        |> run(Absinthe.Fixtures.ContactSchema)

      assert_result(
        {:ok,
         %{
           data: %{
             "__type" => %{
               "description" => "A search result",
               "kind" => "UNION",
               "name" => "SearchResult",
               "possibleTypes" => [%{"name" => "Business"}, %{"name" => "Person"}]
             }
           }
         }},
        result
      )
    end
  end
end
defmodule Absinthe.UtilsTest do
  use Absinthe.Case, async: true

  alias Absinthe.Utils

  # Fixtures: a plain snake_case name and one with the reserved
  # double-underscore prefix used by introspection types.
  @snake "foo_bar"
  @preunderscored_snake "__foo_bar"

  describe "camelize with :lower" do
    test "handles normal snake-cased values" do
      assert Utils.camelize(@snake, lower: true) == "fooBar"
    end

    test "handles snake-cased values starting with double underscores" do
      # The leading "__" must survive camelization untouched.
      assert Utils.camelize(@preunderscored_snake, lower: true) == "__fooBar"
    end
  end

  describe "camelize without :lower" do
    test "handles normal snake-cased values" do
      assert Utils.camelize(@snake) == "FooBar"
    end

    test "handles snake-cased values starting with double underscores" do
      assert Utils.camelize(@preunderscored_snake) == "__FooBar"
    end
  end
end
defmodule Absinthe.CustomTypesTest do
  use Absinthe.Case, async: true

  # Exercises the custom scalar types (datetime, naive datetime, date, time,
  # decimal) from the CustomTypesSchema fixture, each in query output and as
  # mutation input — including explicit nulls and unparsable values.
  describe "custom datetime type" do
    test "can use null in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { datetime: null }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "returns an error when datetime value cannot be parsed" do
      request = """
      mutation {
        custom_types_mutation(args: { datetime: "abc" }) {
          message
        }
      }
      """

      # Only the presence of errors matters, not the exact message.
      assert {:ok, %{errors: _errors}} = run(request, Absinthe.Fixtures.CustomTypesSchema)
    end
  end

  describe "custom naive datetime type" do
    test "can use naive datetime type in queries" do
      result =
        "{ custom_types_query { naive_datetime } }" |> run(Absinthe.Fixtures.CustomTypesSchema)

      assert_result(
        {:ok, %{data: %{"custom_types_query" => %{"naive_datetime" => "2017-01-27T20:31:55"}}}},
        result
      )
    end

    test "can use naive datetime type in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { naive_datetime: "2017-01-27T20:31:55" }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "can use null in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { naive_datetime: null }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "returns an error when naive datetime value cannot be parsed" do
      request = """
      mutation {
        custom_types_mutation(args: { naive_datetime: "abc" }) {
          message
        }
      }
      """

      assert {:ok, %{errors: _errors}} = run(request, Absinthe.Fixtures.CustomTypesSchema)
    end
  end

  describe "custom date type" do
    test "can use date type in queries" do
      result = "{ custom_types_query { date } }" |> run(Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_query" => %{"date" => "2017-01-27"}}}}, result)
    end

    test "can use date type in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { date: "2017-01-27" }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "can use null in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { date: null }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "returns an error when date value cannot be parsed" do
      request = """
      mutation {
        custom_types_mutation(args: { date: "abc" }) {
          message
        }
      }
      """

      assert {:ok, %{errors: _errors}} = run(request, Absinthe.Fixtures.CustomTypesSchema)
    end
  end

  describe "custom time type" do
    test "can use time type in queries" do
      result = "{ custom_types_query { time } }" |> run(Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_query" => %{"time" => "20:31:55"}}}}, result)
    end

    test "can use time type in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { time: "20:31:55" }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "can use null in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { time: null }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "returns an error when time value cannot be parsed" do
      request = """
      mutation {
        custom_types_mutation(args: { time: "abc" }) {
          message
        }
      }
      """

      assert {:ok, %{errors: _errors}} = run(request, Absinthe.Fixtures.CustomTypesSchema)
    end
  end

  describe "custom decimal type" do
    test "can use decimal type in queries" do
      result = "{ custom_types_query { decimal } }" |> run(Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_query" => %{"decimal" => "-3.49"}}}}, result)
    end

    # Decimal input is accepted as string, integer, or float literals.
    test "can use decimal type as string in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { decimal: "-3.49" }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "can use decimal type as integer in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { decimal: 3 }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "can use decimal type as float in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { decimal: -3.49 }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "can use null in input_object" do
      request = """
      mutation {
        custom_types_mutation(args: { decimal: null }) {
          message
        }
      }
      """

      result = run(request, Absinthe.Fixtures.CustomTypesSchema)
      assert_result({:ok, %{data: %{"custom_types_mutation" => %{"message" => "ok"}}}}, result)
    end

    test "returns an error when decimal value cannot be parsed" do
      request = """
      mutation {
        custom_types_mutation(args: { decimal: "abc" }) {
          message
        }
      }
      """

      assert {:ok, %{errors: _errors}} = run(request, Absinthe.Fixtures.CustomTypesSchema)
    end
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Fragments.IntrospectionTest do
  use ExUnit.Case, async: true

  # Spreads a fragment on __Type so inputFields are fetched alongside the
  # (nil, since this is an INPUT_OBJECT) direct fields.
  @query """
  query Q {
    __type(name: "ProfileInput") {
      name
      kind
      fields {
        name
      }
      ...Inputs
    }
  }
  fragment Inputs on __Type {
    inputFields { name }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{
                "__type" => %{
                  "name" => "ProfileInput",
                  "kind" => "INPUT_OBJECT",
                  "fields" => nil,
                  "inputFields" => input_fields
                }
              }
            }} = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])

    # Input-field ordering is not guaranteed, so compare sorted by name.
    expected = [%{"name" => "code"}, %{"name" => "name"}, %{"name" => "age"}]
    by_name = fn %{"name" => name} -> name end
    assert Enum.sort_by(input_fields, by_name) == Enum.sort_by(expected, by_name)
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Fragments.BasicTest do
  use ExUnit.Case, async: true

  # A named fragment spread on the Person object type.
  @query """
  query Q {
    person {
      ...NamedPerson
    }
  }
  fragment NamedPerson on Person {
    name
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])
    assert result == {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Fragments.BasicRootTypeTest do
  use ExUnit.Case, async: true

  # A fragment may target the root query type itself.
  @query """
  query {
    ... Fields
  }
  fragment Fields on RootQueryType {
    thing(id: "foo") {
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])
    assert result == {:ok, %{data: %{"thing" => %{"name" => "Foo"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.SimpleQueryTest do
  use ExUnit.Case, async: true

  # Smallest possible query: a single field with a literal argument.
  @query """
  query { thing(id: "foo") { name } }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])
    assert result == {:ok, %{data: %{"thing" => %{"name" => "Foo"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputObjectTest do
  use ExUnit.Case, async: true

  # Passes an input object literal as a mutation argument.
  @query """
  mutation {
    updateThing(id: "foo", thing: {value: 100}) {
      name
      value
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])
    assert result == {:ok, %{data: %{"updateThing" => %{"name" => "Foo", "value" => 100}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.RootValueTest do
  use ExUnit.Case, async: true

  # The :root_value option seeds the top-level resolution source.
  @query """
  query { version }
  """

  test "scenario #1" do
    opts = [root_value: %{version: "0.0.1"}]
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, opts)
    assert result == {:ok, %{data: %{"version" => "0.0.1"}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.CustomTypes.Datetime.InputObjectTest do
  use ExUnit.Case, async: true

  # An ISO-8601 UTC datetime literal inside an input object.
  @query """
  mutation {
    customTypesMutation(args: { datetime: "2017-01-27T20:31:55Z" }) {
      message
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.CustomTypesSchema, [])
    assert result == {:ok, %{data: %{"customTypesMutation" => %{"message" => "ok"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.CustomTypes.BasicTest do
  use ExUnit.Case, async: true

  # The datetime scalar serializes back to an ISO-8601 UTC string.
  @query """
  query {
    customTypesQuery { datetime }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.CustomTypesSchema, [])
    assert result == {:ok, %{data: %{"customTypesQuery" => %{"datetime" => "2017-01-27T20:31:55Z"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.NestedObjectsTest do
  use ExUnit.Case, async: true

  # Resolution recurses into a nested object selection.
  @query """
  query {
    thing(id: "foo") {
      name
      otherThing {
        name
      }
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok, %{data: %{"thing" => %{"name" => "Foo", "otherThing" => %{"name" => "Bar"}}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.ContextTest do
  use ExUnit.Case, async: true

  # The :context option is made available to resolvers.
  @query """
  query {
    thingByContext {
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, context: %{thing: "bar"})
    assert result == {:ok, %{data: %{"thingByContext" => %{"name" => "Bar"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.ObjectTypenameTest do
  use ExUnit.Case, async: true

  # __typename on a concrete object type yields that type's name.
  @query """
  query {
    person {
      __typename
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])
    assert result == {:ok, %{data: %{"person" => %{"__typename" => "Person", "name" => "Bruce"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.MutationTypeTest do
  use ExUnit.Case, async: true

  # __schema exposes the mutation root's name and kind.
  @query """
  query { __schema { mutationType { name kind } } }
  """

  test "scenario #1" do
    expected =
      {:ok,
       %{
         data: %{
           "__schema" => %{
             "mutationType" => %{"kind" => "OBJECT", "name" => "RootMutationType"}
           }
         }
       }}

    assert Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) == expected
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.InterfaceTypenameAliasTest do
  use ExUnit.Case, async: true

  # __typename can be aliased when selected through an interface field.
  @query """
  query { contact { entity { kind: __typename name } } }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])

    assert result ==
             {:ok, %{data: %{"contact" => %{"entity" => %{"kind" => "Person", "name" => "Bruce"}}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.UnionWrappedTypenameTest do
  use ExUnit.Case, async: true

  # __typename resolves per element inside a list of a union type.
  @query """
  query { searchResults { __typename } }
  """

  test "scenario #1" do
    expected =
      {:ok,
       %{
         data: %{
           "searchResults" => [%{"__typename" => "Person"}, %{"__typename" => "Business"}]
         }
       }}

    assert Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) == expected
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.UnionTypenameTest do
  use ExUnit.Case, async: true

  # __typename on a single union-typed field reports the concrete member.
  @query """
  query { firstSearchResult { __typename } }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])
    assert result == {:ok, %{data: %{"firstSearchResult" => %{"__typename" => "Person"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.SubscriptionTypeTest do
  use ExUnit.Case, async: true

  # __schema exposes the subscription root's name and kind.
  @query """
  query { __schema { subscriptionType { name kind } } }
  """

  test "scenario #1" do
    expected =
      {:ok,
       %{
         data: %{
           "__schema" => %{
             "subscriptionType" => %{"kind" => "OBJECT", "name" => "RootSubscriptionType"}
           }
         }
       }}

    assert Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) == expected
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.SchemaTypesTest do
  use ExUnit.Case, async: true

  # __schema { types } must list every schema type plus the built-in
  # introspection types.
  @query """
  query { __schema { types { name } } }
  """

  test "scenario #1" do
    assert {:ok, %{data: %{"__schema" => %{"types" => types}}}} =
             Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])

    # Order is not significant; compare sorted name lists.
    actual = types |> Enum.map(& &1["name"]) |> Enum.sort()

    expected =
      Enum.sort(
        ~w(Int String Boolean Contact Person Business ProfileInput SearchResult NamedEntity RootMutationType RootQueryType RootSubscriptionType __Schema __Directive __DirectiveLocation __EnumValue __Field __InputValue __Type)
      )

    assert actual == expected
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.TypeInterfaceTest do
  use ExUnit.Case, async: true

  # An INTERFACE type lists its implementing object types via possibleTypes.
  @query """
  query {
    __type(name: "NamedEntity") {
      kind
      name
      description
      possibleTypes {
        name
      }
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{
                "__type" => %{
                  "description" => "A named entity",
                  "kind" => "INTERFACE",
                  "name" => "NamedEntity",
                  "possibleTypes" => [%{"name" => "Business"}, %{"name" => "Person"}]
                }
              }
            }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.FullTest do
  use ExUnit.Case, async: true

  # The canonical full introspection query (as used by GraphQL tooling),
  # run as a smoke test: the schema document must resolve without errors.
  @query """
  query IntrospectionQuery {
    __schema {
      queryType { name }
      mutationType { name }
      subscriptionType { name }
      types {
        ...FullType
      }
      directives {
        name
        description
        locations
        args {
          ...InputValue
        }
      }
    }
  }
  fragment FullType on __Type {
    kind
    name
    description
    fields(includeDeprecated: true) {
      name
      description
      args {
        ...InputValue
      }
      type {
        ...TypeRef
      }
      isDeprecated
      deprecationReason
    }
    inputFields {
      ...InputValue
    }
    interfaces {
      ...TypeRef
    }
    enumValues(includeDeprecated: true) {
      name
      description
      isDeprecated
      deprecationReason
    }
    possibleTypes {
      ...TypeRef
    }
  }
  fragment InputValue on __InputValue {
    name
    description
    type { ...TypeRef }
    defaultValue
  }
  fragment TypeRef on __Type {
    kind
    name
    ofType {
      kind
      name
      ofType {
        kind
        name
        ofType {
          kind
          name
          ofType {
            kind
            name
            ofType {
              kind
              name
              ofType {
                kind
                name
                ofType {
                  kind
                  name
                }
              }
            }
          }
        }
      }
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])

    # Use `assert` on the match so a failed run reports an ExUnit assertion
    # failure (with a diff) instead of crashing with a bare MatchError.
    assert {:ok, %{data: %{"__schema" => schema}}} = result
    refute is_nil(schema)
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.DirectivesTest do
  use ExUnit.Case, async: true

  # The built-in @include and @skip directives must be introspectable,
  # including their locations and the legacy onField/onFragment/onOperation
  # boolean flags.
  @query """
  query {
    __schema {
      directives {
        name
        args { name type { kind ofType { name kind } } }
        locations
        onField
        onFragment
        onOperation
      }
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{
                "__schema" => %{
                  "directives" => [
                    %{
                      "args" => [
                        %{
                          "name" => "if",
                          "type" => %{
                            "kind" => "NON_NULL",
                            "ofType" => %{"kind" => "SCALAR", "name" => "Boolean"}
                          }
                        }
                      ],
                      "locations" => ["FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"],
                      "name" => "include",
                      "onField" => true,
                      "onFragment" => true,
                      "onOperation" => false
                    },
                    %{
                      "args" => [
                        %{
                          "name" => "if",
                          "type" => %{
                            "kind" => "NON_NULL",
                            "ofType" => %{"kind" => "SCALAR", "name" => "Boolean"}
                          }
                        }
                      ],
                      "locations" => ["FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"],
                      "name" => "skip",
                      "onField" => true,
                      "onFragment" => true,
                      "onOperation" => false
                    }
                  ]
                }
              }
            }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.InterfaceTypenameTest do
  use ExUnit.Case, async: true

  # __typename through an interface field reports the concrete type.
  @query """
  query { contact { entity { __typename name } } }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])

    assert result ==
             {:ok,
              %{data: %{"contact" => %{"entity" => %{"__typename" => "Person", "name" => "Bruce"}}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.DefaultValueEnumTest do
  use ExUnit.Case, async: true

  # An enum default on an input field must be rendered as the enum value
  # name ("RED"), not its internal representation.
  @query """
  query {
    __type(name: "ChannelInput") {
      name
      inputFields {
        name
        defaultValue
      }
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{
                "__type" => %{
                  "inputFields" => [%{"defaultValue" => "RED", "name" => "channel"}],
                  "name" => "ChannelInput"
                }
              }
            }} == Absinthe.run(@query, Absinthe.Fixtures.ColorSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.ObjectWithListTest do
  use ExUnit.Case, async: true

  # A LIST-typed field ("others") must expose its wrapped type via ofType.
  # NOTE(review): the argument is written snake_case (include_deprecated);
  # the test passes as-is, presumably via Absinthe's language-convention
  # adapter — the spec spelling would be includeDeprecated.
  @query """
  query {
    __type(name: "Person") {
      fields(include_deprecated: true) {
        name
        type {
          kind
          name
          ofType {
            kind
            name
          }
        }
      }
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{
                "__type" => %{
                  "fields" => [
                    %{
                      "name" => "address",
                      "type" => %{"kind" => "SCALAR", "name" => "String", "ofType" => nil}
                    },
                    %{
                      "name" => "age",
                      "type" => %{"kind" => "SCALAR", "name" => "Int", "ofType" => nil}
                    },
                    %{
                      "name" => "name",
                      "type" => %{"kind" => "SCALAR", "name" => "String", "ofType" => nil}
                    },
                    %{
                      "name" => "others",
                      "type" => %{
                        "kind" => "LIST",
                        "name" => nil,
                        "ofType" => %{"kind" => "OBJECT", "name" => "Person"}
                      }
                    }
                  ]
                }
              }
            }} == Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.ObjectTypenameAliasTest do
  use ExUnit.Case, async: true

  # __typename may be selected under an alias on an object type.
  @query """
  query {
    person {
      kind: __typename
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])
    assert result == {:ok, %{data: %{"person" => %{"kind" => "Person", "name" => "Bruce"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Introspection.QueryTypeTest do
  use ExUnit.Case, async: true

  # __schema exposes the query root's name and kind.
  @query """
  query { __schema { queryType { name kind } } }
  """

  test "scenario #1" do
    expected =
      {:ok,
       %{
         data: %{
           "__schema" => %{"queryType" => %{"kind" => "OBJECT", "name" => "RootQueryType"}}
         }
       }}

    assert Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, []) == expected
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Resolution.ErrorsIncludePathIndicesTest do
  use ExUnit.Case, async: true

  # A resolver error inside a list element must carry the list index in its
  # error path (["things", 1, "fail"]) while sibling elements still resolve.
  @query """
  query {
    things {
      id
      fail(id: "foo")
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{
                "things" => [%{"fail" => "bar", "id" => "bar"}, %{"fail" => nil, "id" => "foo"}]
              },
              errors: [
                %{
                  message: "fail",
                  path: ["things", 1, "fail"],
                  locations: [%{column: 5, line: 4}]
                }
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Resolution.MultipleErrorsTest do
  use ExUnit.Case, async: true

  # A resolver may return several errors at once; each becomes a separate
  # entry with the same path/location.
  @query """
  mutation { failingThing(type: MULTIPLE) { name } }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{"failingThing" => nil},
              errors: [
                %{message: "one", path: ["failingThing"], locations: [%{column: 12, line: 1}]},
                %{message: "two", path: ["failingThing"], locations: [%{column: 12, line: 1}]}
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Resolution.Exceptions.MissingErrorMessageWhenReturningMultipleTest do
  use ExUnit.Case, async: true

  # Multiple error tuples without messages are invalid resolver output and
  # must raise an execution error.
  @query """
  mutation { failingThing(type: MULTIPLE_WITHOUT_MESSAGE) { name } }
  """

  test "scenario #1" do
    run_document = fn -> Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, []) end
    assert_raise Absinthe.ExecutionError, run_document
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Resolution.Exceptions.BadMatchTest do
  use ExUnit.Case, async: true

  # A resolver returning an unrecognized shape must raise an execution error
  # rather than being silently coerced.
  @query """
  query {
    badResolution {
      name
    }
  }
  """

  test "scenario #1" do
    run_document = fn -> Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, []) end
    assert_raise Absinthe.ExecutionError, run_document
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Resolution.Exceptions.MissingErrorMessageTest do
  use ExUnit.Case, async: true

  # An error tuple without a message is invalid resolver output and must
  # raise an execution error.
  @query """
  mutation { failingThing(type: WITHOUT_MESSAGE) { name } }
  """

  test "scenario #1" do
    run_document = fn -> Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, []) end
    assert_raise Absinthe.ExecutionError, run_document
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Resolution.MultipleErrorsWithExtraFieldsTest do
  use ExUnit.Case, async: true

  # Error maps returned from a resolver may carry extra keys (here :code),
  # which are preserved verbatim in each error entry.
  @query """
  mutation { failingThing(type: MULTIPLE_WITH_CODE) { name } }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{"failingThing" => nil},
              errors: [
                %{
                  code: 1,
                  message: "Custom Error 1",
                  path: ["failingThing"],
                  locations: [%{column: 12, line: 1}]
                },
                %{
                  code: 2,
                  message: "Custom Error 2",
                  path: ["failingThing"],
                  locations: [%{column: 12, line: 1}]
                }
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Resolution.ExtraErrorFieldsTest do
  use ExUnit.Case, async: true

  # A single error map with an extra :code key is preserved in the output.
  @query """
  mutation { failingThing(type: WITH_CODE) { name } }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{"failingThing" => nil},
              errors: [
                %{
                  code: 42,
                  message: "Custom Error",
                  path: ["failingThing"],
                  locations: [%{column: 12, line: 1}]
                }
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Variables.DefaultValueTest do
  use ExUnit.Case, async: true

  # A variable default (= 6) applies when no variables are supplied.
  @query """
  query ($mult: Int = 6) {
    times(base: 4, multiplier: $mult)
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, [])
    assert result == {:ok, %{data: %{"times" => 24}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Variables.BasicTest do
  use ExUnit.Case, async: true

  # A required String! variable supplied via the :variables option.
  @query """
  query ($thingId: String!) {
    thing(id: $thingId) {
      name
    }
  }
  """

  test "scenario #1" do
    opts = [variables: %{"thingId" => "bar"}]
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, opts)
    assert result == {:ok, %{data: %{"thing" => %{"name" => "Bar"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.SimpleQueryReturningListTest do
  use ExUnit.Case, async: true

  # A root field resolving to a list of objects.
  @query """
  query {
    things {
      id
      name
    }
  }
  """

  test "scenario #1" do
    expected =
      {:ok,
       %{
         data: %{
           "things" => [%{"id" => "bar", "name" => "Bar"}, %{"id" => "foo", "name" => "Foo"}]
         }
       }}

    assert Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, []) == expected
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Enum.LiteralTest do
  use ExUnit.Case, async: true

  # Enum literals as arguments, one aliased field per enum value (including
  # PUCE, whose internal value is negative).
  @query """
  query {
    red: info(channel: RED) {
      name
      value
    }
    green: info(channel: GREEN) {
      name
      value
    }
    blue: info(channel: BLUE) {
      name
      value
    }
    puce: info(channel: PUCE) {
      name
      value
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{
                "blue" => %{"name" => "BLUE", "value" => 300},
                "green" => %{"name" => "GREEN", "value" => 200},
                "puce" => %{"name" => "PUCE", "value" => -100},
                "red" => %{"name" => "RED", "value" => 100}
              }
            }} == Absinthe.run(@query, Absinthe.Fixtures.ColorSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNullListOfNonNull_TTest do
  use ExUnit.Case, async: true

  # Passing nil for a [Int!]! variable must produce both an invalid-argument
  # error and a non-null variable violation.
  @query """
  query ($value: [Int!]!) {
    nonNullableListOfNonNullableType(input: $value) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              errors: [
                %{
                  message: "Argument \"input\" has invalid value $value.",
                  locations: [%{column: 36, line: 2}]
                },
                %{
                  message: "Variable \"value\": Expected non-null, found null.",
                  locations: [%{column: 8, line: 1}]
                }
              ]
            }} ==
             Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, variables: %{"value" => nil})
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToElementOfTypeNonNullListOf_TTest do
  use ExUnit.Case, async: true

  # A null element inside a [Int]! list literal is legal; counts reflect the
  # null and non-null entries.
  @query """
  query {
    nonNullableList(input: [null, 1]) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{
                "nonNullableList" => %{
                  "content" => [nil, 1],
                  "length" => 2,
                  "nonNullCount" => 1,
                  "nullCount" => 1
                }
              }
            }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToType_TOverridesDefaultValueTest do
  use ExUnit.Case, async: true

  # An explicit null literal overrides the input field's default multiplier,
  # so the base is returned unchanged.
  @query """
  query {
    times: objTimes(input: {base: 4, multiplier: null})
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ObjectTimesSchema, [])
    assert result == {:ok, %{data: %{"times" => 4}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNullListOf_TElementTest do
  use ExUnit.Case, async: true

  # A null element inside a [Int!] variable violates the Int! element type.
  @query """
  query ($value: [Int!]) {
    nullableListOfNonNullableType(input: $value) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              errors: [
                %{
                  message:
                    "Argument \"input\" has invalid value $value.\nIn element #1: Expected type \"Int!\", found null.",
                  locations: [%{column: 33, line: 2}]
                }
              ]
            }} ==
             Absinthe.run(
               @query,
               Absinthe.Fixtures.NullListsSchema,
               variables: %{"value" => [nil, 1]}
             )
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToVariableTypeNonNull_TTest do
  use ExUnit.Case, async: true

  # An Int! variable: explicit nil and missing variables both fail with the
  # same non-null violation; a real value succeeds.
  @query """
  query ($mult: Int!) {
    times(base: 4, multiplier: $mult)
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              errors: [
                %{
                  message: "Variable \"mult\": Expected non-null, found null.",
                  locations: [%{column: 8, line: 1}]
                }
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, variables: %{"mult" => nil})
  end

  test "scenario #2" do
    assert {:ok,
            %{
              errors: [
                %{
                  message: "Variable \"mult\": Expected non-null, found null.",
                  locations: [%{column: 8, line: 1}]
                }
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, [])
  end

  test "scenario #3" do
    assert {:ok, %{data: %{"times" => 8}}} ==
             Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, variables: %{"mult" => 2})
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNullListOfNonNull_TTest do
  use ExUnit.Case, async: true

  # A null literal for a [Int!]! argument is rejected outright.
  @query """
  query {
    nonNullableListOfNonNullableType(input: null) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              errors: [
                %{
                  message: "Argument \"input\" has invalid value null.",
                  locations: [%{column: 36, line: 2}]
                }
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToElementOfTypeListOf_TTest do
  use ExUnit.Case, async: true

  # A null element inside a fully-nullable [Int] list literal is preserved.
  @query """
  query {
    nullableList(input: [null, 1]) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              data: %{
                "nullableList" => %{
                  "content" => [nil, 1],
                  "length" => 2,
                  "nonNullCount" => 1,
                  "nullCount" => 1
                }
              }
            }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToVariableWithDefaultValueTest do
  use ExUnit.Case, async: true

  # The variable default (= 6) applies when the variable is absent, but an
  # explicit nil replaces it, falling through to the field's own behavior.
  @query """
  query ($mult: Int = 6) {
    times(base: 4, multiplier: $mult)
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, [])
    assert result == {:ok, %{data: %{"times" => 24}}}
  end

  test "scenario #2" do
    result = Absinthe.run(@query, Absinthe.Fixtures.TimesSchema, variables: %{"mult" => nil})
    assert result == {:ok, %{data: %{"times" => 4}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNullListOf_TElementTest do
  use ExUnit.Case, async: true

  # A null element inside a [Int!] list literal violates the Int! element
  # type.
  @query """
  {
    nullableListOfNonNullableType(input: [null, 1]) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              errors: [
                %{
                  message:
                    "Argument \"input\" has invalid value [null, 1].\nIn element #1: Expected type \"Int!\", found null.",
                  locations: [%{column: 33, line: 2}]
                }
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNullListOfNonNull_TElementTest do
  use ExUnit.Case, async: true

  # A null element in a [Int!]! variable value violates the Int! element
  # type.
  @query """
  query ($value: [Int!]!) {
    nonNullableListOfNonNullableType(input: $value) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              errors: [
                %{
                  message:
                    "Argument \"input\" has invalid value $value.\nIn element #1: Expected type \"Int!\", found null.",
                  locations: [%{column: 36, line: 2}]
                }
              ]
            }} ==
             Absinthe.run(
               @query,
               Absinthe.Fixtures.NullListsSchema,
               variables: %{"value" => [nil, 1]}
             )
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNullListOf_TTest do
  use ExUnit.Case, async: true

  # A null literal for a nullable [Int!] argument is accepted and the field
  # resolves to nil.
  @query """
  query {
    nullableListOfNonNullableType(input: null) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, [])
    assert result == {:ok, %{data: %{"nullableListOfNonNullableType" => nil}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNullListOfNonNull_TElementTest do
  use ExUnit.Case, async: true

  # A null element inside a [Int!]! list literal violates the Int! element
  # type.
  @query """
  query {
    nonNullableListOfNonNullableType(input: [null, 1]) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              errors: [
                %{
                  message:
                    "Argument \"input\" has invalid value [null, 1].\nIn element #1: Expected type \"Int!\", found null.",
                  locations: [%{column: 36, line: 2}]
                }
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeNonNull_TTest do
  use ExUnit.Case, async: true

  # A null literal for the non-null "base" input field is rejected with a
  # nested field error.
  @query """
  query {
    times: objTimes(input: {base: null})
  }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              errors: [
                %{
                  message:
                    "Argument \"input\" has invalid value {base: null}.\nIn field \"base\": Expected type \"Int!\", found null.",
                  locations: [%{column: 19, line: 2}]
                }
              ]
            }} == Absinthe.run(@query, Absinthe.Fixtures.ObjectTimesSchema, [])
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeListOf_TTest do
  use ExUnit.Case, async: true

  # A nil variable for a nullable [Int] argument resolves the field to nil.
  @query """
  query ($value: [Int]) {
    nullableList(input: $value) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, variables: %{"value" => nil})
    assert result == {:ok, %{data: %{"nullableList" => nil}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNullListOf_TTest do
  use ExUnit.Case, async: true

  # A nil variable for a nullable [Int!] argument resolves the field to nil.
  @query """
  query ($value: [Int!]) {
    nullableListOfNonNullableType(input: $value) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, variables: %{"value" => nil})
    assert result == {:ok, %{data: %{"nullableListOfNonNullableType" => nil}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeNonNull_TTest do
  use ExUnit.Case, async: true

  # A nil variable bound to the non-null "base" input field produces both a
  # nested argument error and a variable non-null violation; a real value
  # succeeds.
  @query """
  query ($value: Int!) { times: objTimes(input: {base: $value}) }
  """

  test "scenario #1" do
    assert {:ok,
            %{
              errors: [
                %{
                  message:
                    "Argument \"input\" has invalid value {base: $value}.\nIn field \"base\": Expected type \"Int!\", found $value.",
                  locations: [%{column: 40, line: 1}]
                },
                %{
                  message: "Variable \"value\": Expected non-null, found null.",
                  locations: [%{column: 8, line: 1}]
                }
              ]
            }} ==
             Absinthe.run(
               @query,
               Absinthe.Fixtures.ObjectTimesSchema,
               variables: %{"value" => nil}
             )
  end

  test "scenario #2" do
    assert {:ok, %{data: %{"times" => 16}}} ==
             Absinthe.run(@query, Absinthe.Fixtures.ObjectTimesSchema, variables: %{"value" => 8})
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.LiteralToTypeListOf_TTest do
  use ExUnit.Case, async: true

  # A literal `null` for a nullable list argument resolves the field to nil.
  @query """
  query {
    nullableList(input: null) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.NullListsSchema, [])

    assert result == {:ok, %{data: %{"nullableList" => nil}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToType_TOverridesDefaultValueTest do
  use ExUnit.Case, async: true

  # An explicit nil variable overrides the field's default multiplier,
  # so the result is the base value itself.
  @query """
  query ($multiplier: Int) {
    times: objTimes(input: {base: 4, multiplier: $multiplier})
  }
  """

  test "scenario #1" do
    result =
      Absinthe.run(
        @query,
        Absinthe.Fixtures.ObjectTimesSchema,
        variables: %{"multiplier" => nil}
      )

    assert result == {:ok, %{data: %{"times" => 4}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToTypeListOf_TElementTest do
  use ExUnit.Case, async: true

  # A nil element inside a nullable-element list variable is preserved and
  # counted separately from the non-nil elements.
  @query """
  query ($value: [Int]) {
    nullableList(input: $value) {
      length
      content
      nonNullCount
      nullCount
    }
  }
  """

  test "scenario #1" do
    result =
      Absinthe.run(
        @query,
        Absinthe.Fixtures.NullListsSchema,
        variables: %{"value" => [nil, 1]}
      )

    assert result ==
             {:ok,
              %{
                data: %{
                  "nullableList" => %{
                    "content" => [nil, 1],
                    "length" => 2,
                    "nonNullCount" => 1,
                    "nullCount" => 1
                  }
                }
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Null.VariableToType_TTest do
  use ExUnit.Case, async: true

  @query """
  query ($value: Int) {
    times: objTimes(input: {base: 4, multiplier: $value})
  }
  """

  # A nil multiplier leaves the base untouched.
  test "scenario #1" do
    result =
      Absinthe.run(
        @query,
        Absinthe.Fixtures.ObjectTimesSchema,
        variables: %{"value" => nil}
      )

    assert result == {:ok, %{data: %{"times" => 4}}}
  end

  # A concrete multiplier is applied to the base.
  test "scenario #2" do
    result =
      Absinthe.run(@query, Absinthe.Fixtures.ObjectTimesSchema, variables: %{"value" => 8})

    assert result == {:ok, %{data: %{"times" => 32}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.InputTypes.Id.LiteralTest do
  use ExUnit.Case, async: true

  # A literal string is accepted for an ID argument and the matching item
  # is returned.
  @query """
  {
    item(id: "foo") {
      id
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.IdTestSchema, [])

    assert result == {:ok, %{data: %{"item" => %{"id" => "foo", "name" => "Foo"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Aliases.AllCapsAliasTest do
  use ExUnit.Case, async: true

  # An all-caps field alias is honored verbatim in the result keys.
  @query """
  query {
    thing(id: "foo") {
      FOO: name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result == {:ok, %{data: %{"thing" => %{"FOO" => "Foo"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Aliases.LeadingUnderscoreTest do
  use ExUnit.Case, async: true

  # An alias starting with an underscore (and containing digits) is valid
  # and used as the result key.
  @query """
  query {
    _thing123: thing(id: "foo") {
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result == {:ok, %{data: %{"_thing123" => %{"name" => "Foo"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Aliases.WithErrorsTest do
  use ExUnit.Case, async: true

  # Errors raised by an aliased field report the alias (not the field name)
  # in both the data key and the error path.
  @query """
  mutation { foo: failingThing(type: WITH_CODE) { name } }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                data: %{"foo" => nil},
                errors: [
                  %{
                    code: 42,
                    message: "Custom Error",
                    path: ["foo"],
                    locations: [%{column: 12, line: 1}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Aliases.WeirdTest do
  use ExUnit.Case, async: true

  # A mixed-case alias with underscores is preserved exactly in the result.
  @query """
  query {
    thing(id: "foo") {
      fOO_Bar_baz: name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result == {:ok, %{data: %{"thing" => %{"fOO_Bar_baz" => "Foo"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Aliases.AliasTest do
  use ExUnit.Case, async: true

  # LEAVE ME
  # Basic alias: the result is keyed by the alias instead of the field name.
  @query """
  query {
    widget: thing(id: "foo") {
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result == {:ok, %{data: %{"widget" => %{"name" => "Foo"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.Aliases.DifferentSelectionSetsTest do
  use ExUnit.Case, async: true

  # Two aliases on the same field may carry different arguments and
  # different selection sets.
  @query """
  query {
    thing1: thing(id: "foo") {
      id
    }
    thing2: thing(id: "bar") {
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok, %{data: %{"thing1" => %{"id" => "foo"}, "thing2" => %{"name" => "Bar"}}}}
  end
end
defmodule Elixir.Absinthe.Integration.Execution.OperationByNameTest do
  use ExUnit.Case, async: true

  # A document with multiple named operations: execution requires a valid
  # `operation_name` selecting exactly one of them.
  @query """
  query ThingFoo {
    thing(id: "foo") {
      name
    }
  }
  query ThingBar {
    thing(id: "bar") {
      name
    }
  }
  """

  @operation_name_error {:ok,
                         %{
                           errors: [
                             %{
                               message:
                                 "Must provide a valid operation name if query contains multiple operations."
                             }
                           ]
                         }}

  # Naming an existing operation runs just that operation.
  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, operation_name: "ThingFoo")

    assert result == {:ok, %{data: %{"thing" => %{"name" => "Foo"}}}}
  end

  # Omitting the operation name is an error when several operations exist.
  test "scenario #2" do
    assert Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, []) == @operation_name_error
  end

  # Naming a non-existent operation produces the same error.
  test "scenario #3" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, operation_name: "invalid")

    assert result == @operation_name_error
  end
end
defmodule Elixir.Absinthe.Integration.Parsing.BasicErrorTest do
  use ExUnit.Case, async: true

  # Malformed syntax (an empty selection set followed by another brace)
  # is reported as a parse error with its location.
  @query """
  {
    thing(id: "foo") {}{ name }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{message: "syntax error before: '}'", locations: [%{column: 21, line: 2}]}
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.MissingOperationTest do
  use ExUnit.Case, async: true

  # Running a mutation against a schema that only defines a query root
  # yields an "operation not supported" error.
  @query """
  mutation { foo }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.OnlyQuerySchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message: "Operation \"mutation\" not supported",
                    locations: [%{column: 1, line: 1}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.RequiredArgumentsTest do
  use ExUnit.Case, async: true

  # Omitting a required (String!) argument is a validation error.
  @query """
  query { thing { name } }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message: "In argument \"id\": Expected type \"String!\", found null.",
                    locations: [%{column: 9, line: 1}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.MissingSelectionSetTest do
  use ExUnit.Case, async: true

  # Selecting an object-typed field without a subfield selection is a
  # validation error with a helpful suggestion.
  @query """
  query {
    things
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message:
                      "Field \"things\" of type \"[Thing]\" must have a selection of subfields. Did you mean \"things { ... }\"?",
                    locations: [%{column: 3, line: 2}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.ExtraArgumentsTest do
  use ExUnit.Case, async: true

  # Passing an argument not declared on the field is a validation error.
  @query """
  query {
    thing(id: "foo", extra: "dunno") {
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message:
                      "Unknown argument \"extra\" on field \"thing\" of type \"RootQueryType\".",
                    locations: [%{column: 20, line: 2}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.InvalidArgumentTest do
  use ExUnit.Case, async: true

  # A string literal supplied where an Int is expected is rejected.
  @query """
  query { number(val: "AAA") }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message: "Argument \"val\" has invalid value \"AAA\".",
                    locations: [%{column: 16, line: 1}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.ErrorResultWhenBadListArgumentTest do
  use ExUnit.Case, async: true

  # A list literal supplied where a scalar argument is expected is rejected.
  @query """
  query {
    thing(id: ["foo"]) {
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message: "Argument \"id\" has invalid value [\"foo\"].",
                    locations: [%{column: 9, line: 2}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.UnknownFieldTest do
  use ExUnit.Case, async: true

  # Selecting a field that does not exist on the object type is a
  # validation error.
  @query """
  {
    thing(id: "foo") {
      name
      bad
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message: "Cannot query field \"bad\" on type \"Thing\".",
                    locations: [%{column: 5, line: 4}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.Variables.Unused.WithoutOperationNameTest do
  use ExUnit.Case, async: true

  # A declared-but-unused variable on an anonymous operation produces an
  # error message without an operation name.
  @query """
  query ($test: String) {
    thing(id: "foo") {
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{message: "Variable \"test\" is never used.", locations: [%{column: 8, line: 1}]}
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.Variables.Unused.WithOperationNameTest do
  use ExUnit.Case, async: true

  # A declared-but-unused variable on a named operation includes the
  # operation name in the error, with locations for both the variable
  # and the operation.
  @query """
  query AnOperationName($test: String) {
    thing(id: "foo") {
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message: "Variable \"test\" is never used in operation \"AnOperationName\".",
                    locations: [%{column: 23, line: 1}, %{column: 1, line: 1}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.UnknownArgForListMemberFieldTest do
  use ExUnit.Case, async: true

  # An undeclared argument on a field selected through a list type is
  # still caught by validation.
  @query """
  query {
    things {
      id(x: 1)
      name
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message: "Unknown argument \"x\" on field \"id\" of type \"Thing\".",
                    locations: [%{column: 8, line: 3}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.InvalidNestedTypeTest do
  use ExUnit.Case, async: true

  # A type error on a field nested inside an input object is reported
  # against the outer argument with the offending field named.
  @query """
  mutation UpdateThingValueBadly {
    thing: updateThing(id: "foo", thing: {value: "BAD"}) {
      name
      value
    }
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message:
                      "Argument \"thing\" has invalid value {value: \"BAD\"}.\nIn field \"value\": Expected type \"Int\", found \"BAD\".",
                    locations: [%{column: 33, line: 2}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.ObjectSpreadsInObjectScopeTest do
  use ExUnit.Case, async: true

  # Spreading a fragment whose type condition has no overlap with the
  # parent type is a validation error.
  @query """
  query Q {
    person {
      name
      ...NamedBusiness
    }
  }
  fragment NamedBusiness on Business {
    employee_count
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ContactSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message:
                      "Fragment spread has no type overlap with parent.\nParent possible types: [\"Person\"]\nSpread possible types: [\"Business\"]\n",
                    locations: [%{column: 5, line: 4}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.IntrospectionFieldsIgnoredInInputObjectsTest do
  use ExUnit.Case, async: true

  # "__typename" supplied inside an input-object variable is not a valid
  # input field and is rejected.
  @query """
  mutation ($input: InputThing) {
    thing: updateThing(id: "foo", thing: $input) {
      name
      value
    }
  }
  """

  test "scenario #1" do
    result =
      Absinthe.run(
        @query,
        Absinthe.Fixtures.ThingsSchema,
        variables: %{"input" => %{"__typename" => "foo", "value" => 100}}
      )

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message:
                      "Argument \"thing\" has invalid value $input.\nIn field \"__typename\": Unknown field.",
                    locations: [%{column: 33, line: 2}]
                  }
                ]
              }}
  end
end
defmodule Elixir.Absinthe.Integration.Validation.CyclesTest do
  use ExUnit.Case, async: true

  # Mutually recursive fragment spreads (Foo -> Bar -> Foo) are reported
  # as cycle errors on both fragments.
  @query """
  query Foo {
    name
  }
  fragment Foo on Blag {
    name
    ...Bar
  }
  fragment Bar on Blah {
    age
    ...Foo
  }
  """

  test "scenario #1" do
    result = Absinthe.run(@query, Absinthe.Fixtures.ThingsSchema, [])

    assert result ==
             {:ok,
              %{
                errors: [
                  %{
                    message: "Cannot spread fragment \"Foo\" within itself via \"Bar\", \"Foo\".",
                    locations: [%{column: 1, line: 4}]
                  },
                  %{
                    message: "Cannot spread fragment \"Bar\" within itself via \"Foo\", \"Bar\".",
                    locations: [%{column: 1, line: 8}]
                  }
                ]
              }}
  end
end
defmodule Absinthe.TypeTest do
  use Absinthe.Case, async: true

  alias Absinthe.Type

  # Minimal schema exercising object naming: `:item` gets a derived name
  # ("Item") while `:book` overrides its name ("NonFictionBook").
  defmodule BasicSchema do
    use Absinthe.Schema

    # Static lookup table used by the :item resolver below.
    @items %{
      "foo" => %{id: "foo", name: "Foo"},
      "bar" => %{id: "bar", name: "Bar"}
    }

    query do
      field :item,
        type: :item,
        args: [
          id: [type: non_null(:id)]
        ],
        resolve: fn %{id: item_id}, _ ->
          {:ok, @items[item_id]}
        end
    end

    object :item do
      description "A Basic Type"
      field :id, :id
      field :name, :string
    end

    object :author do
      description "An author"
      field :id, :id
      field :first_name, :string
      field :last_name, :string
      field :books, list_of(:book)
    end

    # Custom type name supplied via the `name:` option.
    object :book, name: "NonFictionBook" do
      description "A Book"
      field :id, :id
      field :title, :string
      field :isbn, :string
      field :authors, list_of(:author)
    end
  end

  test "definition with custom name" do
    assert %Type.Object{name: "NonFictionBook"} = BasicSchema.__absinthe_type__(:book)
  end

  test "that uses a name derived from the identifier" do
    assert %Type.Object{name: "Item"} = BasicSchema.__absinthe_type__(:item)
  end

  test "root query type definition" do
    assert Absinthe.Fixtures.ContactSchema.__absinthe_type__(:query).name == "RootQueryType"
  end

  test "root mutation type definition" do
    assert Absinthe.Fixtures.ContactSchema.__absinthe_type__(:mutation).name == "RootMutationType"
  end

  # Schema for exercising Type.meta/1 and Type.meta/2 lookups below.
  defmodule MetadataSchema do
    use Absinthe.Schema

    query do
      # Query type must exist
    end

    object :with_meta do
      meta :foo, "bar"
    end

    object :without_meta do
    end
  end

  # Looked up once at compile time; both are Type.Object structs.
  @with_meta Absinthe.Schema.lookup_type(MetadataSchema, :with_meta)
  @without_meta Absinthe.Schema.lookup_type(MetadataSchema, :without_meta)

  describe ".meta/1" do
    test "when no metadata is defined, returns an empty map" do
      assert Type.meta(@without_meta) == %{}
    end

    test "when metadata is defined, returns the metadata as a map" do
      assert Type.meta(@with_meta) == %{foo: "bar"}
    end
  end

  describe ".meta/2" do
    test "when no metadata field is defined, returns nil" do
      assert Type.meta(@without_meta, :bar) == nil
    end

    test "when the requested metadata field is not defined, returns nil" do
      assert Type.meta(@with_meta, :bar) == nil
    end

    test "when the metadata is defined, returns the value" do
      assert Type.meta(@with_meta, :foo) == "bar"
    end
  end
end
defmodule Absinthe.Language.EnumTypeDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint

  describe "converting to Blueprint" do
    test "works, given a Blueprint Schema 'enum' definition" do
      result = from_input("enum Episode { NEWHOPE, EMPIRE, JEDI }")

      assert %Blueprint.Schema.EnumTypeDefinition{
               name: "Episode",
               values: [
                 %Blueprint.Schema.EnumValueDefinition{value: "NEWHOPE"},
                 %Blueprint.Schema.EnumValueDefinition{value: "EMPIRE"},
                 %Blueprint.Schema.EnumValueDefinition{value: "JEDI"}
               ]
             } = result
    end

    test "works, given a Blueprint Schema 'enum' definition with a directive" do
      result =
        from_input("""
        enum Episode @description(text: "An episode") { NEWHOPE, EMPIRE, JEDI }
        """)

      assert %Blueprint.Schema.EnumTypeDefinition{
               name: "Episode",
               directives: [%Blueprint.Directive{name: "description"}],
               values: [
                 %Blueprint.Schema.EnumValueDefinition{value: "NEWHOPE"},
                 %Blueprint.Schema.EnumValueDefinition{value: "EMPIRE"},
                 %Blueprint.Schema.EnumValueDefinition{value: "JEDI"}
               ]
             } = result
    end
  end

  # Parses `text` and converts its single AST definition to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  defp extract_ast_node(%Absinthe.Language.Document{definitions: [node]}) do
    node
  end
end
defmodule Absinthe.Language.DirectiveDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.{Blueprint, Language}

  describe "blueprint conversion" do
    test "works, given a Blueprint Schema 'directive' definition without arguments" do
      result = from_input("directive @thingy on FIELD | OBJECT")

      assert %Blueprint.Schema.DirectiveDefinition{name: "thingy", locations: ["FIELD", "OBJECT"]} =
               result
    end

    test "works, given a Blueprint Schema 'directive' definition without arguments and with directives" do
      result =
        from_input("""
        directive @authorized(if: Boolean!) on FIELD @description(text: "When 'if' is true, only include the field if authorized")
        """)

      assert %Blueprint.Schema.DirectiveDefinition{
               name: "authorized",
               locations: ["FIELD"],
               directives: [%{name: "description"}]
             } = result
    end
  end

  # Parses `text` and converts its single AST definition to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  defp extract_ast_node(%Language.Document{definitions: [node]}) do
    node
  end
end
defmodule Absinthe.Language.DocumentTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint
  alias Absinthe.Language.Document
  alias Absinthe.Language.OperationDefinition

  # Document with two named queries and one named mutation, used by the
  # get_operation/2 tests below.
  @input """
  query MyQuery1 {
    thing(id: "1") {
      name
    }
  }
  query MyQuery2 {
    thing(id: "1") {
      name
    }
  }
  mutation MyMutation {
    thing(id: "1") {
      name
    }
  }
  """

  describe "get_operation/2" do
    test "given an existing operation name, returns the operation definition" do
      {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(@input)
      result = Document.get_operation(doc, "MyQuery2")
      assert %OperationDefinition{name: "MyQuery2", operation: :query} = result
    end

    test "given a non-existing operation name" do
      {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(@input)
      result = Document.get_operation(doc, "DoesNotExist")
      assert nil == result
    end
  end

  describe "converting to Blueprint" do
    test "returns a Blueprint.t" do
      assert %Blueprint{} = ir("{ foo }")
      assert %Blueprint{} = ir("query { baz }")
      assert %Blueprint{} = ir("type Thing { name: String! }")
    end

    # Each definition in the document lands in exactly one Blueprint
    # bucket: operations, schema_definitions, fragments, or directives.
    test "returns a Blueprint.t with the right number of operations" do
      rep = ir("{ foo } mutation Bar { bar } subscription Baz { baz }")
      assert length(rep.directives) == 0
      assert length(rep.operations) == 3
      assert length(rep.schema_definitions) == 0
      assert length(rep.fragments) == 0
    end

    test "returns a Blueprint.t with the right number of types" do
      rep =
        """
        type Person
        @description(text: "A person object")
        {
          name: String
        }
        type Business { name: String}
        union Entity = Person | Business
        enum Purpose { BUSINESS PLEASURE }
        """
        |> ir

      assert length(rep.directives) == 0
      assert length(rep.operations) == 0
      # Person, Business, Entity, Purpose.
      assert length(rep.schema_definitions) == 4
      assert length(rep.fragments) == 0
    end

    test "returns a Blueprint.t with the right number of fragments" do
      rep =
        """
        query {
          myItems {
            ... ItemFields
            ... NameField
          }
          otherItems {
            ... ItemFields
          }
        }
        fragment ItemFields on Item {
          count
        }
        fragment NameField on NamedThing {
          name
        }
        """
        |> ir

      assert length(rep.directives) == 0
      assert length(rep.operations) == 1
      assert length(rep.schema_definitions) == 0
      assert length(rep.fragments) == 2
    end

    test "returns a Blueprint.t with the right number of directives" do
      rep = ir("directive @cs(if: Boolean!) on FIELD")
      assert length(rep.directives) == 1
      assert length(rep.operations) == 0
      assert length(rep.schema_definitions) == 0
      assert length(rep.fragments) == 0
    end
  end

  describe "converting to Blueprint for Schema" do
    # Star Wars-flavored IDL: 1 enum, 1 scalar, 1 interface, 5 object
    # types, 1 input object, 1 union — 10 schema definitions total.
    @idl """
    enum Episode { NEWHOPE, EMPIRE, JEDI }

    scalar Time

    interface Character {
      id: String!
      name: String
      friends: [Character]
      appearsIn: [Episode]
    }

    type Human implements Character {
      id: String!
      name: String
      friends: [Character]
      appearsIn: [Episode]
      homePlanet: String
    }

    type Droid implements Character {
      id: String!
      name: String
      friends: [Character]
      appearsIn: [Episode]
      primaryFunction: String
    }

    type Query {
      hero(episode: Episode): Character
      human(id: String!): Human
      droid(id: String!): Droid
    }

    type Foo {
      name: String
    }

    type Bar {
      name: String
    }

    input Profile {
      name: String!
      age: Int = 18
    }

    union Baz = Foo | Bar
    """

    test "creates the correct number of types" do
      rep = ir(@idl)
      assert length(rep.schema_definitions) == 10
    end
  end

  # Runs the parse + blueprint pipeline phases and returns the Blueprint.
  def ir(input) do
    {:ok, blueprint, _} =
      Absinthe.Pipeline.run(input, [Absinthe.Phase.Parse, Absinthe.Phase.Blueprint])

    blueprint
  end
end
defmodule Absinthe.Language.ObjectValueTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint

  # Query whose first field argument carries an object literal value.
  @query """
  {
    foo(input: {foo: 2}) {
      baz
    }
  }
  """

  describe "converting to Blueprint" do
    test "builds an Input.Object.t" do
      result = from_input(@query)

      assert %Blueprint.Input.Object{
               fields: [
                 %Blueprint.Input.Field{
                   name: "foo",
                   input_value: %Blueprint.Input.RawValue{
                     content: %Blueprint.Input.Integer{value: 2}
                   }
                 }
               ]
             } = result
    end
  end

  # Parses `text` and converts the extracted AST value node to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  # Digs out the value of the first argument of the first selected field.
  defp extract_ast_node(%Absinthe.Language.Document{definitions: [node]}) do
    node.selection_set.selections
    |> List.first()
    |> Map.get(:arguments)
    |> List.first()
    |> Map.get(:value)
  end
end
defmodule Absinthe.Language.UnionTypeDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint

  @text "A metasyntactic variable"

  # IDL where the last definition is a union carrying a directive.
  @idl """
  type Foo {
    name: String
  }

  type Bar {
    name: String
  }

  union Baz @description(text: "#{@text}") =
      Foo
    | Bar
  """

  describe "converting to Blueprint" do
    test "works, given a Blueprint Schema 'union' definition" do
      result = from_input(@idl)

      assert %Blueprint.Schema.UnionTypeDefinition{
               name: "Baz",
               types: [
                 %Blueprint.TypeReference.Name{name: "Foo"},
                 %Blueprint.TypeReference.Name{name: "Bar"}
               ],
               directives: [%{name: "description"}]
             } = result
    end
  end

  # Parses `text` and converts the final AST definition to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  defp extract_ast_node(%Absinthe.Language.Document{definitions: definitions}) do
    List.last(definitions)
  end
end
defmodule Absinthe.Language.InterfaceTypeDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint

  @text "An Entity"

  # IDL where the first definition is an interface carrying a directive.
  @idl """
  interface Entity
  @description(text: "#{@text}")
  {
    name: String!
  }

  type Person implements Entity {
    name: String!
  }

  type Business implements Entity {
    name: String!
  }
  """

  describe "converting to Blueprint" do
    test "works, given a Blueprint Schema 'interface' definition" do
      result = from_input(@idl)

      assert %Blueprint.Schema.InterfaceTypeDefinition{
               name: "Entity",
               directives: [%{name: "description"}]
             } = result
    end
  end

  # Parses `text` and converts the first AST definition to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  defp extract_ast_node(%Absinthe.Language.Document{definitions: definitions}) do
    List.first(definitions)
  end
end
defmodule Absinthe.Language.VariableDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.{Blueprint, Language}

  # Operation declaring a single variable with a default value.
  @query """
  query Foo($showFoo: Boolean = true) {
    foo @include(if: $showFoo)
  }
  """

  describe "converting to Blueprint" do
    test "builds a VariableDefinition.t" do
      result = from_input(@query)

      assert %Blueprint.Document.VariableDefinition{
               name: "showFoo",
               type: %Blueprint.TypeReference.Name{name: "Boolean"},
               default_value: %Blueprint.Input.Boolean{value: true},
               source_location: %Blueprint.SourceLocation{line: 1}
             } = result
    end
  end

  # Parses `text` and converts the first variable definition to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  defp extract_ast_node(%Language.Document{definitions: [node]}) do
    List.first(node.variable_definitions)
  end
end
defmodule Absinthe.Language.ObjectTypeDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint

  describe "converting to Blueprint" do
    test "works, given a Blueprint Schema 'type' definition" do
      result = from_input("type Person { name: String! }")
      assert %Blueprint.Schema.ObjectTypeDefinition{name: "Person"} = result
    end

    test "works, given a Blueprint Schema 'type' definition and a directive" do
      result =
        from_input("""
        type Person
        @description(text: "A person")
        {
          name: String!
        }
        """)

      assert %Blueprint.Schema.ObjectTypeDefinition{
               name: "Person",
               directives: [%{name: "description"}]
             } = result
    end

    test "works, given a Blueprint Schema 'type' definition that implements an interface" do
      result =
        from_input("""
        type Person implements Entity {
          name: String!
        }
        """)

      assert %Blueprint.Schema.ObjectTypeDefinition{
               name: "Person",
               interfaces: [%Blueprint.TypeReference.Name{name: "Entity"}]
             } = result
    end

    test "works, given a Blueprint Schema 'type' definition that implements an interface and uses a directive" do
      result =
        from_input("""
        type Person implements Entity
        @description(text: "A person entity")
        {
          name: String!
        }
        """)

      assert %Blueprint.Schema.ObjectTypeDefinition{
               name: "Person",
               interfaces: [%Blueprint.TypeReference.Name{name: "Entity"}],
               directives: [%{name: "description"}]
             } = result
    end
  end

  # Parses `text` and converts its single AST definition to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  defp extract_ast_node(%Absinthe.Language.Document{definitions: [node]}) do
    node
  end
end
defmodule Absinthe.Language.InlineFragmentTest do
  use Absinthe.Case, async: true

  alias Absinthe.{Blueprint, Language}

  # Anonymous operation whose only selection is an inline fragment.
  @query """
  {
    ... on RootQueryType {
      foo
      bar
    }
  }
  """

  describe "converting to Blueprint" do
    test "builds a Document.Fragment.Inline.t" do
      result = from_input(@query)

      assert %Blueprint.Document.Fragment.Inline{
               type_condition: %Blueprint.TypeReference.Name{name: "RootQueryType"},
               selections: [
                 %Blueprint.Document.Field{name: "foo"},
                 %Blueprint.Document.Field{name: "bar"}
               ]
             } = result
    end
  end

  # Parses `text` and converts the extracted AST node to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  # Returns the first selection of the first operation: the inline fragment.
  defp extract_ast_node(%Language.Document{definitions: nodes}) do
    op = List.first(nodes)
    List.first(op.selection_set.selections)
  end
end
defmodule Absinthe.Language.FieldDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint

  # IDL fixture with three field shapes: a non-null list of non-null
  # strings, a field carrying a directive, and a field whose argument has
  # a default value.
  @idl """
  type Foo {
    bar: [String!]!
    baz @description(text: "A directive on baz"): Int
    quuxes(limit: Int = 4): [Quux]
  }
  """

  describe "converting to Blueprint" do
    test "works, given a Blueprint Schema object field definition" do
      {doc, fields} = fields_from_input(@idl)
      # First field (`bar`): nested NonNull/List/NonNull wrappers.
      field_def = fields |> List.first() |> Blueprint.Draft.convert(doc)

      assert %Blueprint.Schema.FieldDefinition{
               name: "bar",
               type: %Blueprint.TypeReference.NonNull{
                 of_type: %Blueprint.TypeReference.List{
                   of_type: %Blueprint.TypeReference.NonNull{
                     of_type: %Blueprint.TypeReference.Name{name: "String"}
                   }
                 }
               }
             } = field_def
    end

    test "captures directives" do
      {doc, fields} = fields_from_input(@idl)
      # Second field (`baz`), defined with a directive in @idl.
      field_def = fields |> Enum.at(1) |> Blueprint.Draft.convert(doc)
      assert %Blueprint.Schema.FieldDefinition{name: "baz"} = field_def
    end

    test "includes argument definitions" do
      {doc, fields} = fields_from_input(@idl)
      # Third field (`quuxes`). Note: compared with ==, so every field of
      # the struct — including source locations — must match exactly.
      field_def = fields |> Enum.at(2) |> Blueprint.Draft.convert(doc)

      assert %Blueprint.Schema.FieldDefinition{
               identifier: :quuxes,
               name: "quuxes",
               type: %Blueprint.TypeReference.List{
                 of_type: %Blueprint.TypeReference.Name{name: "Quux"}
               },
               arguments: [
                 %Blueprint.Schema.InputValueDefinition{
                   name: "limit",
                   identifier: :limit,
                   type: %Blueprint.TypeReference.Name{name: "Int"},
                   default_value: %Blueprint.Input.Integer{
                     value: 4,
                     source_location: %Blueprint.SourceLocation{column: 23, line: 4}
                   },
                   source_location: %Absinthe.Blueprint.SourceLocation{column: 10, line: 4}
                 }
               ],
               source_location: %Absinthe.Blueprint.SourceLocation{column: 3, line: 4}
             } == field_def
    end
  end

  # Parses `text` and returns {parsed_document, field_ast_nodes}.
  defp fields_from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)

    doc
    |> extract_fields
  end

  defp extract_fields(%Absinthe.Language.Document{definitions: definitions} = doc) do
    fields =
      definitions
      |> List.first()
      |> Map.get(:fields)

    {doc, fields}
  end
end
defmodule Absinthe.Language.FieldTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint
  alias Absinthe.Blueprint.{Input}

  # Field with an object-literal argument.
  @query """
  {
    foo(input: {foo: 2}) {
      baz
    }
  }
  """

  # Same field, additionally annotated with an @include directive driven
  # by a variable.
  @query_with_directive """
  query Bar($showFoo: Boolean!) {
    foo(input: {foo: 2}) @include(if: $showFoo) {
      baz
    }
  }
  """

  describe "converting to Blueprint" do
    test "builds a Field.t" do
      assert %Blueprint.Document.Field{
               name: "foo",
               arguments: [
                 %Input.Argument{
                   name: "input",
                   input_value: %Input.RawValue{
                     content: %Input.Object{
                       fields: [
                         %Input.Field{
                           name: "foo",
                           input_value: %Input.RawValue{content: %Input.Integer{value: 2}}
                         }
                       ]
                     }
                   }
                 }
               ],
               source_location: %Blueprint.SourceLocation{line: 2}
             } = from_input(@query)
    end

    test "builds a Field.t when using a directive" do
      # Same as above but also checks the directive and its `if` argument
      # referencing the $showFoo variable.
      assert %Blueprint.Document.Field{
               name: "foo",
               directives: [
                 %Blueprint.Directive{
                   name: "include",
                   arguments: [
                     %Input.Argument{
                       name: "if",
                       input_value: %Input.RawValue{content: %Input.Variable{name: "showFoo"}}
                     }
                   ],
                   source_location: %Blueprint.SourceLocation{line: 2}
                 }
               ],
               arguments: [
                 %Input.Argument{
                   name: "input",
                   input_value: %Input.RawValue{
                     content: %Input.Object{
                       fields: [
                         %Input.Field{
                           name: "foo",
                           input_value: %Input.RawValue{content: %Input.Integer{value: 2}}
                         }
                       ]
                     }
                   }
                 }
               ],
               source_location: %Blueprint.SourceLocation{line: 2}
             } = from_input(@query_with_directive)
    end
  end

  # Parses `text` and converts the extracted AST node to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)

    doc
    |> extract_ast_node
    |> Blueprint.Draft.convert(doc)
  end

  # Returns the first selected field of the document's single operation.
  defp extract_ast_node(%Absinthe.Language.Document{definitions: [node]}) do
    node.selection_set.selections
    |> List.first()
  end
end
defmodule Absinthe.Language.FragmentTest do
  use Absinthe.Case, async: true

  alias Absinthe.{Blueprint, Language}

  # A single named fragment definition.
  @query """
  fragment FooFields on Foo {
    foo
    bar
  }
  """

  describe "converting to Blueprint" do
    test "builds a Document.Fragment.Named.t" do
      result = from_input(@query)

      assert %Blueprint.Document.Fragment.Named{
               name: "FooFields",
               type_condition: %Blueprint.TypeReference.Name{name: "Foo"},
               selections: [
                 %Blueprint.Document.Field{name: "foo"},
                 %Blueprint.Document.Field{name: "bar"}
               ]
             } = result
    end
  end

  # Parses `text` and converts the first AST definition to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  defp extract_ast_node(%Language.Document{definitions: nodes}) do
    List.first(nodes)
  end
end
defmodule Absinthe.Language.OperationDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.{Blueprint, Language}

  # Named query with a defaulted variable. NOTE: @query is re-bound later
  # in this module, so its value differs per test depending on position.
  @query """
  query Foo($showFoo: Boolean = true) {
    foo @include(if: $showFoo)
  }
  """

  describe "converting to Blueprint" do
    test "builds a Operation.t" do
      assert %Blueprint.Document.Operation{
               name: "Foo",
               type: :query,
               variable_definitions: [
                 %Blueprint.Document.VariableDefinition{
                   name: "showFoo",
                   type: %Blueprint.TypeReference.Name{name: "Boolean"},
                   default_value: %Blueprint.Input.Boolean{value: true}
                 }
               ],
               source_location: %Blueprint.SourceLocation{line: 1}
             } = from_input(@query)
    end

    # Re-bind @query: the same operation plus a named fragment spread.
    @query """
    query Foo($showFoo: Boolean = true) {
      foo @include(if: $showFoo)
      ... QueryBits
    }

    fragment QueryBits on Query {
      bar
    }
    """

    test "builds a Operation.t including a named fragment spread" do
      assert %Blueprint.Document.Operation{
               name: "Foo",
               type: :query,
               variable_definitions: [
                 %Blueprint.Document.VariableDefinition{
                   name: "showFoo",
                   type: %Blueprint.TypeReference.Name{name: "Boolean"},
                   default_value: %Blueprint.Input.Boolean{value: true}
                 }
               ],
               source_location: %Blueprint.SourceLocation{line: 1},
               selections: [
                 %Blueprint.Document.Field{name: "foo"},
                 %Blueprint.Document.Fragment.Spread{name: "QueryBits"}
               ]
             } = from_input(@query)
    end
  end

  # Parses `text` and converts the first AST definition to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)

    doc
    |> extract_ast_node
    |> Blueprint.Draft.convert(doc)
  end

  defp extract_ast_node(%Language.Document{definitions: nodes}) do
    nodes
    |> List.first()
  end
end
defmodule Absinthe.Language.ScalarTypeDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint

  describe "converting to Blueprint" do
    test "works, given a Blueprint Schema 'scalar' definition" do
      result = from_input("scalar Time")
      assert %Blueprint.Schema.ScalarTypeDefinition{name: "Time"} = result
    end

    test "works, given a Blueprint Schema 'scalar' definition with a directive" do
      result =
        from_input("""
        scalar Time @description(text: "A datetime with a timezone")
        """)

      assert %Blueprint.Schema.ScalarTypeDefinition{
               name: "Time",
               directives: [%{name: "description"}]
             } = result
    end
  end

  # Parses `text` and converts its single AST definition to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  defp extract_ast_node(%Absinthe.Language.Document{definitions: [node]}) do
    node
  end
end
defmodule Absinthe.Language.VariableTest do
  use Absinthe.Case, async: true

  alias Absinthe.{Blueprint, Language}

  # Query whose first field argument is a variable reference.
  @query """
  query Foo($input: InputObjectSettingFoo = {foo: 2}) {
    foo(input: $input) {
      baz
    }
  }
  """

  describe "converting to Blueprint" do
    test "builds an Input.Variable.t" do
      result = from_input(@query)
      assert %Blueprint.Input.Variable{name: "input"} = result
    end
  end

  # Parses `text` and converts the extracted AST value node to a Blueprint node.
  defp from_input(text) do
    {:ok, %{input: doc}} = Absinthe.Phase.Parse.run(text)
    Blueprint.Draft.convert(extract_ast_node(doc), doc)
  end

  # Digs out the value of the first argument of the first selected field.
  defp extract_ast_node(%Language.Document{definitions: [node]}) do
    node.selection_set.selections
    |> List.first()
    |> Map.get(:arguments)
    |> List.first()
    |> Map.get(:value)
  end
end
# Conversion of parsed `input` definitions into blueprint schema structs.
defmodule Absinthe.Language.InputObjectTypeDefinitionTest do
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint

  describe "converting to Blueprint" do
    test "works, given a Blueprint Schema 'input' definition" do
      assert %Blueprint.Schema.InputObjectTypeDefinition{name: "Profile"} =
               from_input("input Profile { name: String! }")
    end

    test "works, given a Blueprint Schema 'input' definition and a directive" do
      rep =
        from_input("""
        input Profile
        @description(text: "A person's profile")
        {
        name: String!
        }
        """)

      assert %Blueprint.Schema.InputObjectTypeDefinition{
               name: "Profile",
               directives: [%{name: "description"}],
               fields: [
                 %Blueprint.Schema.InputValueDefinition{
                   name: "name",
                   type: %Blueprint.TypeReference.NonNull{
                     of_type: %Blueprint.TypeReference.Name{name: "String"}
                   }
                 }
               ]
             } = rep
    end
  end

  # Parses the source and converts its single definition to a blueprint draft.
  defp from_input(text) do
    {:ok, %{input: parsed}} = Absinthe.Phase.Parse.run(text)
    parsed |> extract_ast_node() |> Blueprint.Draft.convert(parsed)
  end

  defp extract_ast_node(%Absinthe.Language.Document{definitions: [definition]}) do
    definition
  end
end
# Verifies that per-field extension data survives through the pipeline and can
# be rolled up into the top-level result, whether the roll-up phase runs
# before or after the Result phase.
defmodule Absinthe.ExtensionsTest do
  use Absinthe.Case, async: true

  defmodule Schema do
    use Absinthe.Schema

    query do
      field :foo, :string do
        middleware :resolve_foo
      end
    end

    # Resolves :foo while attaching extension data to the resolution struct.
    def resolve_foo(res, _opts) do
      %{res | value: "hello world", state: :resolved, extensions: %{foo: 1}}
    end
  end

  defmodule MyPhase do
    # Rolls the single field's extensions data up into a top-level
    # :extensions key on the blueprint result.
    def run(blueprint, _) do
      extensions = get_ext(blueprint.execution.result.fields)
      result = Map.put(blueprint.result, :extensions, extensions)
      {:ok, %{blueprint | result: result}}
    end

    defp get_ext([field]) do
      field.extensions
    end
  end

  test "sets the extensions on the result properly" do
    doc = "{foo}"

    pipeline =
      Schema
      |> Absinthe.Pipeline.for_document()
      |> Absinthe.Pipeline.insert_after(Absinthe.Phase.Document.Result, MyPhase)

    assert {:ok, bp, _} = Absinthe.Pipeline.run(doc, pipeline)
    assert bp.result == %{data: %{"foo" => "hello world"}, extensions: %{foo: 1}}
  end

  # Fixed typo in the test name: "clober" -> "clobber".
  test "Result phase doesn't clobber the extensions" do
    doc = "{foo}"

    pipeline =
      Schema
      |> Absinthe.Pipeline.for_document()
      |> Absinthe.Pipeline.insert_before(Absinthe.Phase.Document.Result, MyPhase)

    assert {:ok, bp, _} = Absinthe.Pipeline.run(doc, pipeline)
    assert bp.result == %{data: %{"foo" => "hello world"}, extensions: %{foo: 1}}
  end
end
# Unit tests for Absinthe.Logger's variable filtering and document formatting.
defmodule Absinthe.LoggerTest do
  use Absinthe.Case, async: true

  describe "Absinthe.Logger.filter_variables/1" do
    @value "abcd"
    @variables %{"token" => @value, "password" => @value, "alsoUnsafe" => @value}
    @filtered "[FILTERED]"

    # With no explicit list, the default filter covers "token" and
    # "password" but leaves "alsoUnsafe" intact.
    test "it filters the set values, with defaults" do
      assert %{
               "token" => @filtered,
               "password" => @filtered,
               "alsoUnsafe" => @value
             } = Absinthe.Logger.filter_variables(@variables)
    end

    # An explicit list replaces the defaults: "password" passes through
    # unfiltered here while "alsoUnsafe" is filtered.
    test "it filters given values" do
      assert %{
               "token" => @filtered,
               "password" => @value,
               "alsoUnsafe" => @filtered
             } = Absinthe.Logger.filter_variables(@variables, ~w(token alsoUnsafe))
    end
  end

  describe "Absinthe.Logger.document/1" do
    @document nil
    test "given nil, is [EMPTY]" do
      assert "[EMPTY]" = Absinthe.Logger.document(@document)
    end

    @document ""
    test "given an empty string, is also [EMPTY]" do
      assert "[EMPTY]" = Absinthe.Logger.document(@document)
    end

    @document "{ foo }"
    # NOTE(review): the test name mentions "a leading newline" but the
    # assertion expects the document returned unchanged — confirm which
    # behavior is intended.
    test "given a non-empty string, is the document with a leading newline" do
      assert @document == Absinthe.Logger.document(@document)
    end

    @document %Absinthe.Blueprint{name: "name"}
    test "given a blueprint document with a name, is [COMPILED#<name>]" do
      assert "[COMPILED#<name>]" == Absinthe.Logger.document(@document)
    end

    @document %Absinthe.Blueprint{}
    test "given a blueprint document without a name, is [COMPILED]" do
      assert "[COMPILED]" == Absinthe.Logger.document(@document)
    end

    @document %{}
    # Fallback: any other term is rendered with inspect/1.
    test "given something else, is inspected" do
      assert "%{}" == Absinthe.Logger.document(@document)
    end
  end
end
# Verifies the LanguageConventions adapter's two-way mapping between external
# camelCase names and internal snake_case names, for both fields and variables.
defmodule Absinthe.Adapter.LanguageConventionsTest do
  use Absinthe.Case, async: true

  alias Absinthe.Adapter.LanguageConventions

  describe "to_internal_name/2" do
    test "converts external camelcase field names to underscore" do
      assert "foo_bar" = LanguageConventions.to_internal_name("fooBar", :field)
    end

    test "converts external camelcase variable names to underscore" do
      assert "foo_bar" = LanguageConventions.to_internal_name("fooBar", :variable)
    end
  end

  describe "to_external_name/2" do
    test "converts internal underscored field names to camelcase external field names" do
      assert "fooBar" = LanguageConventions.to_external_name("foo_bar", :field)
    end

    test "converts internal underscored variable names to camelcase external variable names" do
      assert "fooBar" = LanguageConventions.to_external_name("foo_bar", :variable)
    end
  end
end
# Exercises Absinthe.Schema.Notation's placement rules: each notation macro
# may only appear in specific scopes, and misuse must raise
# Absinthe.Schema.Notation.Error with a descriptive message. Each test
# compiles a small schema module on the fly via the helpers at the bottom.
defmodule Absinthe.Schema.NotationTest do
  use Absinthe.Case, async: true

  @moduletag :pending_schema

  describe "arg" do
    test "can be under field as an attribute" do
      assert_no_notation_error("ArgFieldValid", """
      object :foo do
      field :picture, :string do
      arg :size, :integer
      end
      end
      """)
    end

    test "can be under directive as an attribute" do
      assert_no_notation_error("ArgDirectiveValid", """
      directive :test do
      arg :if, :boolean
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "ArgToplevelInvalid",
        """
        arg :name, :string
        """,
        "Invalid schema notation: `arg` must only be used within `directive`, `field`"
      )
    end
  end

  describe "directive" do
    test "can be toplevel" do
      assert_no_notation_error("DirectiveValid", """
      directive :foo do
      end
      """)
    end

    test "cannot be non-toplevel" do
      assert_notation_error(
        "DirectiveInvalid",
        """
        directive :foo do
        directive :bar do
        end
        end
        """,
        "Invalid schema notation: `directive` must only be used toplevel"
      )
    end
  end

  describe "enum" do
    test "can be toplevel" do
      assert_no_notation_error("EnumValid", """
      enum :foo do
      end
      """)
    end

    test "cannot be non-toplevel" do
      assert_notation_error(
        "EnumInvalid",
        """
        enum :foo do
        enum :bar do
        end
        end
        """,
        "Invalid schema notation: `enum` must only be used toplevel"
      )
    end
  end

  describe "field" do
    test "can be under object as an attribute" do
      assert_no_notation_error("FieldObjectValid", """
      object :bar do
      field :name, :string
      end
      """)
    end

    test "can be under input_object as an attribute" do
      assert_no_notation_error("FieldInputObjectValid", """
      input_object :bar do
      field :name, :string
      end
      """)
    end

    test "can be under interface as an attribute" do
      assert_no_notation_error("FieldInterfaceValid", """
      interface :bar do
      field :name, :string
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "FieldToplevelInvalid",
        """
        field :foo, :string
        """,
        "Invalid schema notation: `field` must only be used within `input_object`, `interface`, `object`"
      )
    end
  end

  describe "input_object" do
    test "can be toplevel" do
      assert_no_notation_error("InputObjectValid", """
      input_object :foo do
      end
      """)
    end

    test "cannot be non-toplevel" do
      assert_notation_error(
        "InputObjectInvalid",
        """
        input_object :foo do
        input_object :bar do
        end
        end
        """,
        "Invalid schema notation: `input_object` must only be used toplevel"
      )
    end
  end

  describe "instruction" do
    test "can be under directive as an attribute" do
      assert_no_notation_error("InstructionValid", """
      directive :bar do
      instruction fn -> :ok end
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "InstructionToplevelInvalid",
        """
        instruction fn -> :ok end
        """,
        "Invalid schema notation: `instruction` must only be used within `directive`"
      )
    end

    test "cannot be within object" do
      assert_notation_error(
        "InstructionObjectInvalid",
        """
        object :foo do
        instruction fn -> :ok end
        end
        """,
        "Invalid schema notation: `instruction` must only be used within `directive`"
      )
    end
  end

  describe "interface" do
    test "can be toplevel" do
      assert_no_notation_error("InterfaceToplevelValid", """
      interface :foo do
      field :name, :string
      resolve_type fn _, _ -> :bar end
      end
      """)
    end

    # `interface` doubles as an attribute inside `object` to declare that
    # the object implements the interface.
    test "can be under object as an attribute" do
      assert_no_notation_error("InterfaceObjectValid", """
      interface :foo do
      field :name, :string
      resolve_type fn _, _ -> :bar end
      end
      object :bar do
      interface :foo
      field :name, :string
      end
      """)
    end

    test "cannot be under input_object as an attribute" do
      assert_notation_error(
        "InterfaceInputObjectInvalid",
        """
        interface :foo do
        field :name, :string
        resolve_type fn _, _ -> :bar end
        end
        input_object :bar do
        interface :foo
        end
        """,
        "Invalid schema notation: `interface` (as an attribute) must only be used within `object`"
      )
    end
  end

  describe "interfaces" do
    test "can be under object as an attribute" do
      assert_no_notation_error("InterfacesValid", """
      interface :bar do
      field :name, :string
      resolve_type fn _, _ -> :foo end
      end
      object :foo do
      field :name, :string
      interfaces [:bar]
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "InterfacesInvalid",
        """
        interface :bar do
        field :name, :string
        end
        interfaces [:bar]
        """,
        "Invalid schema notation: `interfaces` must only be used within `object`"
      )
    end
  end

  describe "is_type_of" do
    test "can be under object as an attribute" do
      assert_no_notation_error("IsTypeOfValid", """
      object :bar do
      is_type_of fn _, _ -> true end
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "IsTypeOfToplevelInvalid",
        """
        is_type_of fn _, _ -> true end
        """,
        "Invalid schema notation: `is_type_of` must only be used within `object`"
      )
    end

    test "cannot be within interface" do
      assert_notation_error(
        "IsTypeOfInterfaceInvalid",
        """
        interface :foo do
        is_type_of fn _, _ -> :bar end
        end
        """,
        "Invalid schema notation: `is_type_of` must only be used within `object`"
      )
    end
  end

  describe "object" do
    test "can be toplevel" do
      assert_no_notation_error("ObjectValid", """
      object :foo do
      end
      """)
    end

    test "cannot be non-toplevel" do
      assert_notation_error(
        "ObjectInvalid",
        """
        object :foo do
        object :bar do
        end
        end
        """,
        "Invalid schema notation: `object` must only be used toplevel"
      )
    end

    # :subscription, :query and :mutation are reserved for the root types
    # created by their dedicated macros.
    test "cannot use reserved identifiers" do
      assert_notation_error(
        "ReservedIdentifierSubscription",
        """
        object :subscription do
        end
        """,
        "Invalid schema notation: cannot create an `object` with reserved identifier `subscription`"
      )

      assert_notation_error(
        "ReservedIdentifierQuery",
        """
        object :query do
        end
        """,
        "Invalid schema notation: cannot create an `object` with reserved identifier `query`"
      )

      assert_notation_error(
        "ReservedIdentifierMutation",
        """
        object :mutation do
        end
        """,
        "Invalid schema notation: cannot create an `object` with reserved identifier `mutation`"
      )
    end
  end

  describe "on" do
    test "can be under directive as an attribute" do
      assert_no_notation_error("OnValid", """
      directive :foo do
      on [Foo, Bar]
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "OnInvalid",
        """
        on [Foo, Bar]
        """,
        "Invalid schema notation: `on` must only be used within `directive`"
      )
    end
  end

  describe "parse" do
    test "can be under scalar as an attribute" do
      assert_no_notation_error("ParseValid", """
      scalar :foo do
      parse &(&1)
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "ParseInvalid",
        """
        parse &(&1)
        """,
        "Invalid schema notation: `parse` must only be used within `scalar`"
      )
    end
  end

  describe "resolve" do
    test "can be under field as an attribute" do
      assert_no_notation_error("ResolveValid", """
      object :bar do
      field :foo, :integer do
      resolve fn _, _, _ -> {:ok, 1} end
      end
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "ResolveInvalid",
        """
        resolve fn _, _ -> {:ok, 1} end
        """,
        "Invalid schema notation: `resolve` must only be used within `field`"
      )
    end

    test "cannot be within object" do
      assert_notation_error(
        "ResolveInvalid2",
        """
        object :foo do
        resolve fn _, _ -> {:ok, 1} end
        end
        """,
        "Invalid schema notation: `resolve` must only be used within `field`"
      )
    end
  end

  describe "resolve_type" do
    test "can be under interface as an attribute" do
      assert_no_notation_error("ResolveTypeValidInterface", """
      interface :bar do
      resolve_type fn _, _ -> :baz end
      end
      """)
    end

    test "can be under union as an attribute" do
      assert_no_notation_error("ResolveTypeValidUnion", """
      union :bar do
      resolve_type fn _, _ -> :baz end
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "ResolveTypeInvalidToplevel",
        """
        resolve_type fn _, _ -> :bar end
        """,
        "Invalid schema notation: `resolve_type` must only be used within `interface`, `union`"
      )
    end

    test "cannot be within object" do
      assert_notation_error(
        "ResolveTypeInvalidObject",
        """
        object :foo do
        resolve_type fn _, _ -> :bar end
        end
        """,
        "Invalid schema notation: `resolve_type` must only be used within `interface`, `union`"
      )
    end
  end

  describe "scalar" do
    test "can be toplevel" do
      assert_no_notation_error("ScalarValid", """
      scalar :foo do
      end
      """)
    end

    test "cannot be non-toplevel" do
      assert_notation_error(
        "ScalarInvalid",
        """
        scalar :foo do
        scalar :bar do
        end
        end
        """,
        "Invalid schema notation: `scalar` must only be used toplevel"
      )
    end
  end

  describe "serialize" do
    test "can be under scalar as an attribute" do
      assert_no_notation_error("SerializeValid", """
      scalar :foo do
      serialize &(&1)
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "SerializeInvalid",
        """
        serialize &(&1)
        """,
        "Invalid schema notation: `serialize` must only be used within `scalar`"
      )
    end
  end

  describe "types" do
    test "can be under union as an attribute" do
      assert_no_notation_error("TypesValid", """
      object :audi do
      end
      object :volvo do
      end
      union :brand do
      types [:audi, :volvo]
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "TypesInvalid",
        "types [:foo]",
        "Invalid schema notation: `types` must only be used within `union`"
      )
    end
  end

  describe "value" do
    test "can be under enum as an attribute" do
      assert_no_notation_error("ValueValid", """
      enum :color do
      value :red
      value :green
      value :blue
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "ValueInvalid",
        "value :b",
        "Invalid schema notation: `value` must only be used within `enum`"
      )
    end
  end

  describe "description" do
    test "can be under object as an attribute" do
      assert_no_notation_error("DescriptionValid", """
      object :item do
      description \"""
      Here's a description
      \"""
      end
      """)
    end

    test "cannot be toplevel" do
      assert_notation_error(
        "DescriptionInvalid",
        ~s(description "test"),
        "Invalid schema notation: `description` must not be used toplevel"
      )
    end
  end

  @doc """
  Assert a notation error occurs.

  ## Examples

  ```
  iex> assert_notation_error(\"""
  object :bar do
  field :name, :string
  end
  \""")
  ```
  """
  def assert_notation_error(name, text, message) do
    assert_raise(Absinthe.Schema.Notation.Error, message, fn ->
      """
      defmodule MyTestSchema.#{name} do
      use Absinthe.Schema
      query do
      #Query type must exist
      end
      #{text}
      end
      """
      |> Code.eval_string()
    end)
  end

  # Compiles a schema module wrapping `text`; passes as long as compilation
  # does not raise. (Code.eval_string/1 returns a {value, bindings} tuple,
  # which is always truthy, so the `assert` itself can never fail — the real
  # check is the absence of a Notation.Error.)
  def assert_no_notation_error(name, text) do
    assert """
           defmodule MyTestSchema.#{name} do
           use Absinthe.Schema
           query do
           #Query type must exist
           end
           #{text}
           end
           """
           |> Code.eval_string()
  end
end
# Verifies that interface implementations declared in an imported type module
# are included in the schema's interface-implementors index.
defmodule Absinthe.Schema.Rule.ObjectMustImplementInterfacesTest do
  use Absinthe.Case, async: true

  defmodule Types do
    use Absinthe.Schema.Notation

    object :user do
      interface :named
      field :name, :string
    end
  end

  defmodule Schema do
    use Absinthe.Schema

    import_types Types

    interface :named do
      field :name, :string

      resolve_type fn
        %{type: :dog} -> :dog
        # Fixed copy-paste slip: user-shaped values previously resolved to
        # :dog. (This mapping is not exercised by the test below.)
        %{type: :user} -> :user
        _ -> nil
      end
    end

    object :dog do
      field :name, :string
      interface :named
    end

    query do
    end
  end

  # Fixed typo in the test name: "propogated" -> "propagated".
  test "interfaces are propagated across type imports" do
    assert %{named: [:dog, :user]} == Schema.__absinthe_interface_implementors__()
  end
end
# Checks that "__"-prefixed (reserved) names for types, fields, arguments,
# and directives are rejected by the TypeNamesAreReserved schema rule.
defmodule Absinthe.Schema.Rule.TypeNamesAreReservedTest do
  use Absinthe.Case, async: true

  alias Absinthe.Schema.Rule

  describe "rule" do
    @tag :pending_schema
    test "is enforced" do
      # "prefix_schema" is a fixture schema expected to produce one error
      # per reserved name it declares.
      assert_schema_error("prefix_schema", [
        %{rule: Rule.TypeNamesAreReserved, data: %{artifact: "type name", value: "__MyThing"}},
        %{rule: Rule.TypeNamesAreReserved, data: %{artifact: "field name", value: "__mything"}},
        %{rule: Rule.TypeNamesAreReserved, data: %{artifact: "argument name", value: "__myarg"}},
        %{
          rule: Rule.TypeNamesAreReserved,
          data: %{artifact: "directive name", value: "__mydirective"}
        },
        %{rule: Rule.TypeNamesAreReserved, data: %{artifact: "argument name", value: "__if"}}
      ])
    end
  end
end
# Compiling a schema that references unknown types must raise a schema error.
defmodule Absinthe.Schema.Rule.TypeNamesAreValidTest do
  use Absinthe.Case, async: true

  @tag :pending_schema
  test "Trying to compile a schema with invalid type references fails" do
    assert_raise Absinthe.Schema.Error, fn ->
      # Fixture schema with bad type references.
      load_schema("bad_types_schema")
    end
  end
end
# A schema with no (object) query root must fail the QueryTypeMustBeObject rule.
defmodule Absinthe.Schema.Rule.QueryTypeMustBeObjectTest do
  use Absinthe.Case, async: true

  alias Absinthe.Schema.Rule

  describe "rule" do
    @tag :pending_schema
    test "is enforced" do
      assert_schema_error("empty_schema", [
        %{rule: Rule.QueryTypeMustBeObject, data: %{}}
      ])
    end
  end
end
# Checks that output types used in input positions (and vice versa) are
# rejected. ("InputOuput" matches the rule module's actual spelling.)
# NOTE(review): the expected :file locations are machine-specific absolute
# paths ("/Users/ben/...") — these expectations are brittle and will not
# match on other machines; consider relative paths. Tests are tagged
# :pending_schema.
defmodule Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlacedTest do
  use Absinthe.Case, async: true

  describe "rule" do
    @tag :pending_schema
    test "is enforced with output types on arguments" do
      assert_schema_error("invalid_output_types", [
        %{
          data: %{
            field: :blah,
            parent: Absinthe.Type.Object,
            struct: Absinthe.Type.InputObject,
            type: :input
          },
          location: %{
            file:
              "/Users/ben/src/absinthe/test/support/fixtures/dynamic/invalid_output_types.exs",
            line: 10
          },
          rule: Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlaced
        },
        %{
          data: %{argument: :invalid_arg, struct: Absinthe.Type.Object, type: :user},
          location: %{
            file:
              "/Users/ben/src/absinthe/test/support/fixtures/dynamic/invalid_output_types.exs",
            line: 4
          },
          rule: Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlaced
        }
      ])
    end

    @tag :pending_schema
    test "is enforced with input types on arguments" do
      assert_schema_error("invalid_input_types", [
        %{
          data: %{
            field: :blah,
            parent: Absinthe.Type.InputObject,
            struct: Absinthe.Type.Object,
            type: :user
          },
          location: %{
            file: "/Users/ben/src/absinthe/test/support/fixtures/dynamic/invalid_input_types.exs",
            line: 7
          },
          rule: Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlaced
        }
      ])
    end
  end
end
# A default_value that is not a member of the argument's enum type must make
# schema compilation raise.
defmodule Absinthe.Schema.Rule.DefaultEnumValuePresentTest do
  use Absinthe.Case, async: true

  describe "rule" do
    @tag :pending_schema
    test "is enforced when the defaultValue is not in the enum" do
      # :OTHER below is deliberately absent from the :channel enum.
      schema = """
      defmodule BadColorSchema do
      use Absinthe.Schema
      @names %{
      r: "RED"
      }
      query do
      field :info,
      type: :channel_info,
      args: [
      channel: [type: non_null(:channel), default_value: :OTHER],
      ],
      resolve: fn
      %{channel: channel}, _ ->
      {:ok, %{name: @names[channel]}}
      end
      end
      enum :channel do
      value :red, as: :r
      value :green, as: :g
      end
      object :channel_info do
      field :name, :string
      end
      end
      """

      error = ~r/The default_value for an enum must be present in the enum values/

      assert_raise(Absinthe.Schema.Error, error, fn ->
        Code.eval_string(schema)
      end)
    end
  end
end
# Verifies that import_types/1,2 records the imported module and its
# :only/:except options on the blueprint schema definition.
defmodule Absinthe.Schema.Notation.Experimental.ImportTypesTest do
  use Absinthe.Case

  @moduletag :experimental

  defmodule Source do
    use Absinthe.Schema.Notation

    object :one do
    end

    object :two do
    end

    object :three do
    end
  end

  defmodule WithoutOptions do
    use Absinthe.Schema.Notation

    import_types Source
  end

  defmodule UsingOnlyOption do
    use Absinthe.Schema.Notation

    import_types(Source, only: [:one, :two])
  end

  defmodule UsingExceptOption do
    use Absinthe.Schema.Notation

    import_types(Source, except: [:one, :two])
  end

  describe "import_types" do
    test "without options" do
      assert [{Source, []}] == imports(WithoutOptions)
    end

    test "with :only" do
      assert [{Source, only: [:one, :two]}] == imports(UsingOnlyOption)
    end

    test "with :except" do
      assert [{Source, except: [:one, :two]}] == imports(UsingExceptOption)
    end
  end

  # Reads the recorded {module, opts} import tuples off the module's
  # blueprint schema definition.
  defp imports(module) do
    %{schema_definitions: [schema]} = module.__absinthe_blueprint__
    schema.imports
  end
end
# Exercises import_sdl/1: defining types from SDL strings, multiple
# invocations, SDL descriptions, and decorations/2 callbacks that attach
# descriptions and resolvers to SDL-defined fields and arguments.
defmodule Absinthe.Schema.Notation.Experimental.ImportSdlTest do
  use Absinthe.Case
  import ExperimentalNotationHelpers

  @moduletag :experimental
  @moduletag :sdl

  defmodule Definition do
    use Absinthe.Schema

    import_sdl("""
    type Query {
    "A list of posts"
    posts(filter: PostFilter): [Post]
    admin: User!
    }
    type PostFilter {
    name: String
    }
    "A submitted post"
    type Post {
    title: String!
    body: String!
    \"""
    The post author
    (is a user)
    \"""
    author: User!
    }
    """)

    # A second invocation adds further definitions to the same schema.
    import_sdl("""
    type User {
    name: String!
    }
    """)

    # Stub resolver attached to Query.posts via decorations/2 below.
    def get_posts(_, _, _) do
      posts = [
        %{title: "Foo", body: "A body.", author: %{name: "Bruce"}},
        %{title: "Bar", body: "A body.", author: %{name: "Ben"}}
      ]

      {:ok, posts}
    end

    # decorations/2 matches on a blueprint node and its ancestors, returning
    # decorations ({:description, _} / {:resolve, _}) to apply to that node.
    def decorations(%{identifier: :admin}, [%{identifier: :query} | _]) do
      {:description, "The admin"}
    end

    def decorations(%{identifier: :filter}, [%{identifier: :posts} | _]) do
      {:description, "A filter argument"}
    end

    def decorations(%{identifier: :posts}, [%{identifier: :query} | _]) do
      {:resolve, &__MODULE__.get_posts/3}
    end

    def decorations(_node, _ancestors) do
      []
    end
  end

  describe "query root type" do
    test "is defined" do
      assert %{name: "Query", identifier: :query} = lookup_type(Definition, :query)
    end

    test "defines fields" do
      assert %{name: "posts"} = lookup_field(Definition, :query, :posts)
    end
  end

  describe "non-root type" do
    test "is defined" do
      assert %{name: "Post", identifier: :post} = lookup_type(Definition, :post)
    end

    test "defines fields" do
      assert %{name: "title"} = lookup_field(Definition, :post, :title)
      assert %{name: "body"} = lookup_field(Definition, :post, :body)
    end
  end

  describe "descriptions" do
    test "work on objects" do
      assert %{description: "A submitted post"} = lookup_type(Definition, :post)
    end

    test "work on fields" do
      assert %{description: "A list of posts"} = lookup_field(Definition, :query, :posts)
    end

    test "can be multiline" do
      assert %{description: "The post author\n(is a user)"} =
               lookup_field(Definition, :post, :author)
    end

    test "can be added by a decoration to a field" do
      assert %{description: "The admin"} = lookup_compiled_field(Definition, :query, :admin)
    end

    test "can be added by a decoration to an argument" do
      field = lookup_compiled_field(Definition, :query, :posts)
      assert %{description: "A filter argument"} = field.args.filter
    end
  end

  describe "multiple invocations" do
    test "can add definitions" do
      assert %{name: "User", identifier: :user} = lookup_type(Definition, :user)
    end
  end

  @query """
  { admin { name } }
  """

  describe "execution with root_value" do
    test "works" do
      assert {:ok, %{data: %{"admin" => %{"name" => "Bruce"}}}} =
               Absinthe.run(@query, Definition, root_value: %{admin: %{name: "Bruce"}})
    end
  end

  @query """
  { posts { title } }
  """

  describe "execution with decoration-defined resolvers" do
    test "works" do
      assert {:ok, %{data: %{"posts" => [%{"title" => "Foo"}, %{"title" => "Bar"}]}}} =
               Absinthe.run(@query, Definition)
    end
  end
end
# Verifies that import_fields/1,2 records its source (a local type identifier
# or a {Module, :type} tuple) and its :only/:except options on the object.
defmodule Absinthe.Schema.Notation.Experimental.ImportFieldsTest do
  use Absinthe.Case
  import ExperimentalNotationHelpers

  @moduletag :experimental

  defmodule Source do
    use Absinthe.Schema.Notation

    object :source do
      field :one, :string do
      end

      field :two, :string do
      end

      field :three, :string do
      end
    end
  end

  defmodule WithoutOptions do
    use Absinthe.Schema.Notation

    object :internal_source do
      field :one, :string do
      end

      field :two, :string do
      end

      field :three, :string do
      end
    end

    object :internal_target do
      import_fields :internal_source
    end

    object :external_target do
      import_fields {Source, :source}
    end
  end

  defmodule UsingOnlyOption do
    use Absinthe.Schema.Notation

    object :internal_source do
      field :one, :string do
      end

      field :two, :string do
      end

      field :three, :string do
      end
    end

    object :internal_target do
      import_fields :internal_source, only: [:one, :two]
    end

    object :external_target do
      import_fields {Source, :source}, only: [:one, :two]
    end
  end

  defmodule UsingExceptOption do
    use Absinthe.Schema.Notation

    object :internal_source do
      field :one, :string do
      end

      field :two, :string do
      end

      field :three, :string do
      end
    end

    object :internal_target do
      import_fields :internal_source, except: [:one, :two]
    end

    object :external_target do
      import_fields {Source, :source}, except: [:one, :two]
    end
  end

  describe "import_fields" do
    test "without options from an internal source" do
      assert [{:internal_source, []}] == imports(WithoutOptions, :internal_target)
    end

    test "without options from an external source" do
      assert [{{Source, :source}, []}] == imports(WithoutOptions, :external_target)
    end

    test "with :only from an internal source" do
      assert [{:internal_source, only: [:one, :two]}] ==
               imports(UsingOnlyOption, :internal_target)
    end

    test "with :only from external source" do
      assert [{{Source, :source}, only: [:one, :two]}] ==
               imports(UsingOnlyOption, :external_target)
    end

    test "with :except from an internal source" do
      assert [{:internal_source, [except: [:one, :two]]}] ==
               imports(UsingExceptOption, :internal_target)
    end

    test "with :except from external source" do
      assert [{{Source, :source}, [except: [:one, :two]]}] ==
               imports(UsingExceptOption, :external_target)
    end
  end

  # Reads the recorded field-import tuples off the (un-compiled) type.
  defp imports(module, type) do
    lookup_type(module, type).imports
  end
end
# Verifies that resolve/1 accepts every supported resolver shape (anonymous
# fn, local private/public capture, remote capture, {module, fun} ref, and a
# function-invocation result) and stores each as Absinthe.Resolution.call
# middleware.
defmodule Absinthe.Schema.Notation.Experimental.ResolveTest do
  use Absinthe.Case
  import ExperimentalNotationHelpers

  @moduletag :experimental

  defmodule Definition do
    use Absinthe.Schema.Notation

    object :obj do
      field :anon_literal, :boolean do
        resolve fn _, _, _ ->
          {:ok, true}
        end
      end

      field :local_private, :boolean do
        resolve &local_private/3
      end

      field :local_public, :boolean do
        resolve &local_public/3
      end

      field :remote, :boolean do
        resolve &Absinthe.Schema.Notation.Experimental.ResolveTest.remote_resolve/3
      end

      field :remote_ref, :boolean do
        resolve {Absinthe.Schema.Notation.Experimental.ResolveTest, :remote_resolve}
      end

      field :invocation_result, :boolean do
        resolve mapping(:foo)
      end
    end

    defp local_private(_, _, _) do
      {:ok, true}
    end

    def local_public(_, _, _) do
      {:ok, true}
    end

    # Returns a resolver fn; exercises passing an invocation result to resolve/1.
    def mapping(_) do
      fn _, _, _ ->
        {:ok, true}
      end
    end
  end

  def remote_resolve(_, _, _) do
    {:ok, true}
  end

  # Asserts the field's middleware entry is a ref whose stored middleware
  # function is Absinthe.Resolution.call.
  def assert_resolver(field_identifier) do
    assert %{middleware: {:ref, module, identifier}} =
             lookup_field(Definition, :obj, field_identifier)

    assert [{{Absinthe.Resolution, :call}, _}] =
             module.__absinthe_function__(identifier, :middleware)
  end

  describe "resolve" do
    test "when given an anonymous function literal" do
      assert_resolver(:anon_literal)
    end

    test "when given a local private function capture" do
      assert_resolver(:local_private)
    end

    test "when given a local public function capture" do
      assert_resolver(:local_public)
    end

    test "when given a remote public function capture" do
      assert_resolver(:remote)
    end

    test "when given a remote ref" do
      assert_resolver(:remote_ref)
    end

    test "when given the result of a function invocation" do
      assert_resolver(:invocation_result)
    end
  end
end
# Verifies object/2,3 naming and the interplay between @desc and the
# :description attribute (attribute wins when both are present).
defmodule Absinthe.Schema.Notation.Experimental.ObjectTest do
  use Absinthe.Case
  import ExperimentalNotationHelpers

  @moduletag :experimental

  defmodule Definition do
    use Absinthe.Schema.Notation

    object :no_attrs do
    end

    object :with_attr, name: "Named" do
    end

    @desc "Desc One"
    object :with_desc do
    end

    @desc "Desc Two"
    object :with_desc_attr, description: "overridden" do
    end

    @modattr "Desc Three"
    @desc @modattr
    object :with_desc_assign do
    end

    object :with_desc_attr_literal, description: "Desc Four" do
    end

    # NOTE(review): @desc_five is never defined in this module, so the
    # description: attribute evaluates to nil and the @desc above supplies
    # the value — confirm this is the intended exercise.
    @desc "Desc Five"
    object :with_desc_attr_mod, description: @desc_five do
    end
  end

  describe "object" do
    test "without attributes" do
      assert %{name: "NoAttrs", identifier: :no_attrs} = lookup_type(Definition, :no_attrs)
    end

    test "with a name attribute" do
      assert %{name: "Named", identifier: :with_attr} = lookup_type(Definition, :with_attr)
    end

    test "with a @desc and no description attr" do
      assert %{description: "Desc One"} = lookup_type(Definition, :with_desc)
    end

    test "with a @desc using an assignment" do
      assert %{description: "Desc Three"} = lookup_type(Definition, :with_desc_assign)
    end

    test "with a @desc and a description attr" do
      assert %{description: "Desc Two"} = lookup_type(Definition, :with_desc_attr)
    end

    test "with a description attribute as a literal" do
      assert %{description: "Desc Four"} = lookup_type(Definition, :with_desc_attr_literal)
    end

    test "from a module attribute" do
      assert %{description: "Desc Five"} = lookup_type(Definition, :with_desc_attr_mod)
    end
  end
end
# Verifies field/2,3 definition shapes (bare type, attrs, with/without a
# block) and the interplay between @desc and the :description attribute.
defmodule Absinthe.Schema.Notation.Experimental.FieldTest do
  use Absinthe.Case
  import ExperimentalNotationHelpers

  @moduletag :experimental

  defmodule Definition do
    use Absinthe.Schema.Notation

    @desc "Object description"
    object :obj do
      field :plain, :string

      field :with_block, :string do
      end

      field :with_attrs, type: :boolean, name: "HasAttrs"

      field :with_attrs_and_body, type: :boolean, name: "HasAttrsAndBody" do
      end

      @desc "Desc One"
      field :with_desc, :string

      @desc "Desc Two"
      field :with_desc_and_block, :string do
      end

      @desc "Desc Three"
      field :with_desc_attr, type: :string, description: "overridden"

      field :with_desc_attr_literal, type: :string, description: "Desc Four"

      # NOTE(review): @desc_five is never defined in this module, so the
      # description: attribute evaluates to nil and the @desc above supplies
      # the value — confirm this is the intended exercise.
      @desc "Desc Five"
      field :with_desc_attr_mod, type: :string, description: @desc_five
    end
  end

  describe "field" do
    test "without a body and with a bare type" do
      assert %{name: "plain", description: nil, type: :string, identifier: :plain} =
               lookup_field(Definition, :obj, :plain)
    end

    test "with a body and with a bare type" do
      assert %{name: "with_block", type: :string, identifier: :with_block} =
               lookup_field(Definition, :obj, :with_block)
    end

    test "with attrs and without a body" do
      assert %{name: "HasAttrs", type: :boolean, identifier: :with_attrs} =
               lookup_field(Definition, :obj, :with_attrs)
    end

    test "with attrs and with a body" do
      assert %{name: "HasAttrsAndBody", type: :boolean, identifier: :with_attrs_and_body} =
               lookup_field(Definition, :obj, :with_attrs_and_body)
    end

    test "with @desc and without a block" do
      assert %{description: "Desc One"} = lookup_field(Definition, :obj, :with_desc)
    end

    test "with @desc and with a block" do
      assert %{description: "Desc Two"} = lookup_field(Definition, :obj, :with_desc_and_block)
    end

    test "with @desc and a description attr" do
      assert %{description: "Desc Three"} = lookup_field(Definition, :obj, :with_desc_attr)
    end

    test "with a description attribute as a literal" do
      assert %{description: "Desc Four"} = lookup_field(Definition, :obj, :with_desc_attr_literal)
    end

    test "with a description attribute from a module attribute" do
      assert %{description: "Desc Five"} = lookup_field(Definition, :obj, :with_desc_attr_mod)
    end
  end
end
# Verifies compiled-schema behavior of import_fields/1: merging fields into
# objects, input objects, and interfaces; transitive imports; importing from
# multiple sources; importing from types brought in via import_types; and the
# errors raised for missing sources and import cycles.
defmodule Absinthe.Schema.Notation.ImportTest do
  use ExUnit.Case, async: true

  describe "import fields" do
    test "fields can be imported" do
      defmodule Foo do
        use Absinthe.Schema

        query do
          # Query type must exist
        end

        object :foo do
          field :name, :string
        end

        object :bar do
          import_fields :foo
          field :email, :string
        end
      end

      assert [:email, :name] = Foo.__absinthe_type__(:bar).fields |> Map.keys() |> Enum.sort()
    end

    test "works for input objects" do
      defmodule InputFoo do
        use Absinthe.Schema

        query do
          # Query type must exist
        end

        input_object :foo do
          field :name, :string
        end

        input_object :bar do
          import_fields :foo
          field :email, :string
        end
      end

      fields = InputFoo.__absinthe_type__(:bar).fields

      assert [:email, :name] = fields |> Map.keys() |> Enum.sort()
    end

    test "works for interfaces" do
      defmodule InterfaceFoo do
        use Absinthe.Schema

        query do
          # Query type must exist
        end

        object :cool_fields do
          field :name, :string
        end

        interface :foo do
          import_fields :cool_fields
          resolve_type fn _, _ -> :real_foo end
        end

        object :real_foo do
          interface :foo
          import_fields :cool_fields
        end
      end

      interface_fields = InterfaceFoo.__absinthe_type__(:foo).fields
      assert [:name] = interface_fields |> Map.keys() |> Enum.sort()

      object_fields = InterfaceFoo.__absinthe_type__(:real_foo).fields
      assert [:name] = object_fields |> Map.keys() |> Enum.sort()
    end

    # foo -> bar -> baz: fields accumulate through the chain.
    test "can work transitively" do
      defmodule Bar do
        use Absinthe.Schema

        query do
          # Query type must exist
        end

        object :foo do
          field :name, :string
        end

        object :bar do
          import_fields :foo
          field :email, :string
        end

        object :baz do
          import_fields :bar
          field :age, :integer
        end
      end

      assert [:age, :email, :name] ==
               Bar.__absinthe_type__(:baz).fields |> Map.keys() |> Enum.sort()
    end

    @tag :pending_schema
    test "raises errors nicely" do
      defmodule ErrorSchema do
        use Absinthe.Schema.Notation

        object :bar do
          # :asdf does not exist; compilation should record a
          # FieldImportsExist error rather than crash.
          import_fields :asdf
          field :email, :string
        end
      end

      assert [error] = ErrorSchema.__absinthe_errors__()

      assert %{
               data: %{
                 artifact:
                   "Field Import Error\n\nObject :bar imports fields from :asdf but\n:asdf does not exist in the schema!",
                 value: :asdf
               },
               location: %{file: _, line: _},
               rule: Absinthe.Schema.Rule.FieldImportsExist
             } = error
    end

    @tag :pending_schema
    test "handles circular errors" do
      defmodule Circles do
        use Absinthe.Schema.Notation

        # foo and bar import each other: a cycle the compiler must report.
        object :foo do
          import_fields :bar
          field :name, :string
        end

        object :bar do
          import_fields :foo
          field :email, :string
        end
      end

      assert [error] = Circles.__absinthe_errors__()

      assert %{
               data: %{
                 artifact:
                   "Field Import Cycle Error\n\nField Import in object `foo' `import_fields(:bar) forms a cycle via: (`foo' => `bar' => `foo')",
                 value: :bar
               },
               location: %{file: _, line: _},
               rule: Absinthe.Schema.Rule.NoCircularFieldImports
             } = error
    end

    test "can import types from more than one thing" do
      defmodule Multiples do
        use Absinthe.Schema

        object :foo do
          field :name, :string
        end

        object :bar do
          field :email, :string
        end

        query do
          import_fields :foo
          import_fields :bar
          field :age, :integer
        end
      end

      assert [:age, :email, :name] ==
               Multiples.__absinthe_type__(:query).fields |> Map.keys() |> Enum.sort()
    end

    test "can import fields from imported types" do
      defmodule Source1 do
        use Absinthe.Schema

        query do
          # Query type must exist
        end

        object :foo do
          field :name, :string
        end
      end

      defmodule Source2 do
        use Absinthe.Schema

        query do
          # Query type must exist
        end

        object :bar do
          field :email, :string
        end
      end

      defmodule Dest do
        use Absinthe.Schema

        query do
          # Query type must exist
        end

        import_types Source1
        import_types Source2

        object :baz do
          import_fields :foo
          import_fields :bar
        end
      end

      assert [:email, :name] = Dest.__absinthe_type__(:baz).fields |> Map.keys() |> Enum.sort()
    end
  end
end
# Tests for the experimental schema pipeline: blueprint introspection
# (__absinthe_blueprint__/0), type lookup, and running simple queries
# against a macro-defined schema. Tagged :experimental at the module level.
defmodule Absinthe.Schema.ExperimentalTest do
  use Absinthe.Case

  @moduletag :experimental

  defmodule Schema do
    use Absinthe.Schema

    query do
      field :user, non_null(:user) do
        resolve fn _, _ ->
          {:ok, %{first_name: "Bruce", last_name: "Williams"}}
        end
      end

      field :hello, :string do
        arg :name, :string

        # NOTE(review): this resolver pattern-matches %{name: name}, so it
        # relies on the :name argument being present in the query.
        resolve fn %{name: name}, _ ->
          {:ok, "hello #{name}"}
        end
      end
    end

    # @desc sets the description of the definition that follows it
    @desc "user"
    object :user do
      @desc "their full name"
      field :full_name, :string do
        resolve fn user, _, _ ->
          {:ok, "#{user.first_name} #{user.last_name}"}
        end
      end
    end
  end

  describe "__absinthe_blueprint__/0" do
    test "returns the blueprint" do
      # Two type definitions expected: the query type and :user
      assert 2 ==
               length(
                 Schema.__absinthe_blueprint__().schema_definitions
                 |> List.first()
                 |> Map.fetch!(:type_definitions)
               )
    end
  end

  describe "type lookup" do
    test "it works on objects" do
      assert %Absinthe.Type.Object{} = type = Absinthe.Schema.lookup_type(Schema, :user)
      assert %{fields: %{full_name: field}} = type
      assert field.identifier == :full_name
      # The field must have middleware installed (at least its resolver)
      assert field.middleware != []
    end
  end

  test "simple" do
    query = """
    { user { fullName }}
    """

    assert %Absinthe.Type.Object{} = type = Absinthe.Schema.lookup_type(Schema, :query)
    assert %{fields: %{user: _field}} = type

    assert {:ok, %{data: %{"user" => %{"fullName" => "Bruce Williams"}}}} ==
             Absinthe.run(query, Schema)
  end

  @tag :simple
  test "simple input" do
    query = """
    { hello(name: "bob") }
    """

    assert {:ok, %{data: %{"hello" => "hello bob"}}} == Absinthe.run(query, Schema)
  end
end
# Serialization and parsing behavior of the built-in GraphQL scalar types
# (:integer, :float, :string, :id, :boolean).
defmodule Absinthe.Type.BuiltIns.ScalarsTest do
  use Absinthe.Case, async: true

  alias Absinthe.Type

  defmodule TestSchema do
    use Absinthe.Schema

    query do
      # Query type must exist
    end
  end

  # Bounds used by the built-in :integer scalar in this suite.
  @max_int 9_007_199_254_740_991
  @min_int -9_007_199_254_740_991

  # Look up `type_name` in the test schema and serialize `input` with it.
  defp serialize(type_name, input) do
    type_name
    |> TestSchema.__absinthe_type__()
    |> Type.Scalar.serialize(input)
  end

  # Look up `type_name` in the test schema and parse `input` with it.
  defp parse(type_name, input) do
    type_name
    |> TestSchema.__absinthe_type__()
    |> Type.Scalar.parse(input)
  end

  describe ":integer" do
    test "serializes as an integer" do
      assert serialize(:integer, 1) == 1
    end

    test "can be parsed from an integer within the valid range" do
      assert parse(:integer, 0) == {:ok, 0}
      assert parse(:integer, 1) == {:ok, 1}
      assert parse(:integer, -1) == {:ok, -1}
      assert parse(:integer, @max_int) == {:ok, @max_int}
      assert parse(:integer, @min_int) == {:ok, @min_int}
      # Just outside the accepted range on either side
      assert parse(:integer, @max_int + 1) == :error
      assert parse(:integer, @min_int - 1) == :error
    end

    test "cannot be parsed from a float" do
      assert parse(:integer, 0.0) == :error
    end

    test "cannot be parsed from a binary" do
      assert parse(:integer, "") == :error
      assert parse(:integer, "0") == :error
    end
  end

  describe ":float" do
    test "serializes as a float" do
      assert serialize(:float, 1.0) == 1.0
    end

    test "can be parsed from an integer" do
      # Integers coerce to their float equivalents
      assert parse(:float, 0) == {:ok, 0.0}
      assert parse(:float, 1) == {:ok, 1.0}
      assert parse(:float, -1) == {:ok, -1.0}
    end

    test "can be parsed from a float" do
      assert parse(:float, 0.0) == {:ok, 0.0}
      assert parse(:float, 1.9) == {:ok, 1.9}
      assert parse(:float, -1.9) == {:ok, -1.9}
    end

    test "cannot be parsed from a binary" do
      assert parse(:float, "") == :error
      assert parse(:float, "0.0") == :error
    end
  end

  describe ":string" do
    test "serializes as a string" do
      assert serialize(:string, "") == ""
      assert serialize(:string, "string") == "string"
    end

    test "can be parsed from a binary" do
      assert parse(:string, "") == {:ok, ""}
      assert parse(:string, "string") == {:ok, "string"}
    end

    test "cannot be parsed from an integer" do
      assert parse(:string, 0) == :error
    end

    test "cannot be parsed from a float" do
      assert parse(:string, 1.9) == :error
    end
  end

  describe ":id" do
    test "serializes as a string" do
      # Integer IDs are stringified on output
      assert serialize(:id, 1) == "1"
      assert serialize(:id, "1") == "1"
    end

    test "can be parsed from a binary" do
      assert parse(:id, "") == {:ok, ""}
      assert parse(:id, "abc123") == {:ok, "abc123"}
    end

    test "can be parsed from an integer" do
      assert parse(:id, 0) == {:ok, "0"}
      assert parse(:id, @max_int) == {:ok, Integer.to_string(@max_int)}
      assert parse(:id, @min_int) == {:ok, Integer.to_string(@min_int)}
    end

    test "cannot be parsed from a float" do
      assert parse(:id, 1.9) == :error
    end
  end

  describe ":boolean" do
    test "serializes as a boolean" do
      assert serialize(:boolean, true) == true
      assert serialize(:boolean, false) == false
    end

    test "can be parsed from a boolean" do
      assert parse(:boolean, true) == {:ok, true}
      assert parse(:boolean, false) == {:ok, false}
    end

    test "cannot be parsed from a number" do
      assert parse(:boolean, 0) == :error
      assert parse(:boolean, 0.0) == :error
    end

    test "cannot be parsed from a binary" do
      assert parse(:boolean, "true") == :error
      assert parse(:boolean, "false") == :error
    end
  end
end
# Verifies that `import_types` accepts its various module-reference syntaxes
# (plain atom, {}-grouped modules, aliases, and scoped references). Each
# lookup only succeeds if the named type was actually imported into the
# fixture schema.
defmodule Absinthe.Type.ImportTypesTest do
  use Absinthe.Case, async: true

  alias Absinthe.Fixtures.ImportTypes

  describe "import_types" do
    test "works with a plain atom" do
      assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :receipt)
    end

    test "works with {}" do
      assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :customer)
      assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :employee)
      assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :order)
    end

    test "works with an alias and plain atom" do
      assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :weekly_schedule)
    end

    test "works with an alias and {}" do
      assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :mailing_address)
      assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :contact_method)
      assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :contact_kind)
    end

    test "works with an alias, {} and scoped reference" do
      assert Absinthe.Schema.lookup_type(ImportTypes.Schema, :avatar)
    end
  end
end
# Interface type tests: definition, implementor tracking, type resolution
# (including a resolve_type that returns nil), querying interface-typed
# fields, inline-fragment narrowing, and (pending) schema validation errors.
defmodule Absinthe.Type.InterfaceTest do
  use Absinthe.Case, async: true

  alias Absinthe.Schema.Rule

  defmodule Schema do
    use Absinthe.Schema

    query do
      field :foo, type: :foo
      field :bar, type: :bar

      field :named_thing, :named do
        resolve fn _, _ ->
          {:ok, %{}}
        end
      end
    end

    object :foo do
      field :name, :string

      is_type_of fn _ ->
        true
      end

      interface :named
    end

    object :bar do
      field :name, :string

      is_type_of fn _ ->
        true
      end

      interface :named
    end

    # NOT USED IN THE QUERY
    object :baz do
      field :name, :string

      is_type_of fn _ ->
        true
      end

      # List form of interface declaration (vs. `interface :named` above)
      interfaces [:named]
    end

    interface :named do
      description "An interface"
      field :name, :string

      # Deliberately resolves to nil to exercise that edge case below
      resolve_type fn _, _ ->
        # just a value
        nil
      end
    end
  end

  describe "interface" do
    test "can be defined" do
      obj = Schema.__absinthe_type__(:named)
      assert %Absinthe.Type.Interface{name: "Named", description: "An interface"} = obj
      assert Absinthe.Type.function(obj, :resolve_type)
    end

    test "captures the relationships in the schema" do
      implementors = Map.get(Schema.__absinthe_interface_implementors__(), :named, [])
      assert :foo in implementors
      assert :bar in implementors
      # Not directly in query, but because it's
      # an available type and there's a field that
      # defines the interface as a type
      assert :baz in implementors
    end

    test "can find implementors" do
      obj = Schema.__absinthe_type__(:named)
      assert length(Absinthe.Schema.implementors(Schema, obj)) == 3
    end
  end

  describe "an object that implements an interface" do
    @graphql """
    query {
      contact {
        entity { name }
      }
    }
    """
    test "with the interface as a field type, can select fields that are declared by the interface" do
      assert_data(
        %{"contact" => %{"entity" => %{"name" => "Bruce"}}},
        run(@graphql, Absinthe.Fixtures.ContactSchema)
      )
    end

    @graphql """
    query {
      contact {
        entity { name age }
      }
    }
    """
    test "with the interface as a field type, can't select fields from an implementing type without 'on'" do
      # `age` is only on the Person implementor, not the NamedEntity interface
      assert_error_message(
        ~s(Cannot query field "age" on type "NamedEntity". Did you mean to use an inline fragment on "Person"?),
        run(@graphql, Absinthe.Fixtures.ContactSchema)
      )
    end

    @graphql """
    query {
      contact {
        entity {
          name
          ... on Person { age }
        }
      }
    }
    """
    test "with the interface as a field type, can select fields from an implementing type with 'on'" do
      assert_data(
        %{"contact" => %{"entity" => %{"name" => "Bruce", "age" => 35}}},
        run(@graphql, Absinthe.Fixtures.ContactSchema)
      )
    end
  end

  describe "when it doesn't define those fields" do
    @tag :pending_schema
    test "reports schema errors" do
      assert_schema_error("bad_interface_schema", [
        %{rule: Rule.ObjectMustImplementInterfaces, data: %{object: "Foo", interface: "Aged"}},
        %{rule: Rule.ObjectMustImplementInterfaces, data: %{object: "Foo", interface: "Named"}},
        %{rule: Rule.ObjectInterfacesMustBeValid, data: %{object: "Quux", interface: "Foo"}},
        %{rule: Rule.InterfacesMustResolveTypes, data: "Named"}
      ])
    end
  end

  # Schema where an interface field (:item) is itself typed as another
  # interface, so nested fragment narrowing can be exercised.
  defmodule InterfaceSchema do
    use Absinthe.Schema

    # Example data
    @box %{
      item: %{name: "Computer", cost: 1000}
    }

    query do
      field :box,
        type: :box,
        args: [],
        resolve: fn _, _ ->
          {:ok, @box}
        end
    end

    object :box do
      field :item, :valued_item
      interface :has_item
      is_type_of fn _ -> true end
    end

    interface :has_item do
      field :item, :item
    end

    object :valued_item do
      field :name, :string
      field :cost, :integer
      interface :item
      is_type_of fn _ -> true end
    end

    interface :item do
      field :name, :string
    end
  end

  @graphql """
  query {
    box {
      item {
        name
        cost
      }
    }
  }
  """
  test "can query an interface field type's fields" do
    assert_data(
      %{"box" => %{"item" => %{"name" => "Computer", "cost" => 1000}}},
      run(@graphql, InterfaceSchema)
    )
  end

  @graphql """
  query {
    box {
      ... on HasItem {
        item {
          name
        }
      }
    }
  }
  """
  test "can query an interface field using a fragment and access its type's fields" do
    assert_data(%{"box" => %{"item" => %{"name" => "Computer"}}}, run(@graphql, InterfaceSchema))
  end

  @graphql """
  query {
    box {
      ... on HasItem {
        item {
          name
          ... on ValuedItem {
            cost
          }
        }
      }
    }
  }
  """
  test "can query InterfaceSubtypeSchema treating box as HasItem and item as ValuedItem" do
    assert_data(
      %{"box" => %{"item" => %{"name" => "Computer", "cost" => 1000}}},
      run(@graphql, InterfaceSchema)
    )
  end

  @graphql """
  query {
    box {
      ... on HasItem {
        item {
          name
          cost
        }
      }
    }
  }
  """
  test "rejects querying InterfaceSubtypeSchema treating box as HasItem asking for cost" do
    # Inside HasItem, `item` is typed as the Item interface, which has no `cost`
    assert_error_message(
      ~s(Cannot query field "cost" on type "Item". Did you mean to use an inline fragment on "ValuedItem"?),
      run(@graphql, InterfaceSchema)
    )
  end

  @graphql """
  query {
    namedThing {
      name
    }
  }
  """
  test "works even when resolve_type returns nil" do
    assert_data(%{"namedThing" => %{}}, run(@graphql, Schema))
  end
end
# Definition tests for input object types declared with the schema DSL.
defmodule Absinthe.Type.InputObjectTest do
  use Absinthe.Case, async: true

  defmodule Schema do
    use Absinthe.Schema

    query do
      # Query type must exist
    end

    @desc "A profile"
    input_object :profile do
      field :name, :string
      field :profile_picture, :string
    end
  end

  describe "input object types" do
    test "can be defined" do
      # The input object carries its name and the @desc-supplied description,
      # and is registered in the schema's type map.
      input_type = Schema.__absinthe_type__(:profile)
      assert %Absinthe.Type.InputObject{name: "Profile", description: "A profile"} = input_type
      assert %{profile: "Profile"} = Schema.__absinthe_types__()
    end

    test "can define fields" do
      profile = Schema.__absinthe_type__(:profile)
      assert %Absinthe.Type.Field{name: "name", type: :string} = profile.fields.name
    end
  end
end
# Union type tests: definition via the DSL and member-type resolution,
# both with an explicit resolve_type and via the members' is_type_of.
defmodule Absinthe.Type.UnionTest do
  use Absinthe.Case, async: true

  alias Absinthe.Type

  defmodule TestSchema do
    use Absinthe.Schema

    query do
      # Query type must exist
    end

    object :person do
      description "A person"
      field :name, :string
      field :age, :integer
    end

    object :business do
      description "A business"
      field :name, :string
      field :employee_count, :integer
    end

    union :search_result do
      description "A search result"
      types [:person, :business]

      # Disambiguates members by which distinguishing key is present
      resolve_type fn
        %{age: _}, _ ->
          :person

        %{employee_count: _}, _ ->
          :business
      end
    end

    object :foo do
      field :name, :string

      is_type_of fn
        %{name: _} -> true
        _ -> false
      end
    end

    # No resolve_type here; resolution falls back to members' is_type_of
    union :other_result do
      types [:foo]
    end
  end

  describe "union" do
    test "can be defined" do
      obj = TestSchema.__absinthe_type__(:search_result)

      assert %Absinthe.Type.Union{
               name: "SearchResult",
               description: "A search result",
               types: [:business, :person]
             } = obj

      assert Absinthe.Type.function(obj, :resolve_type)
    end

    test "can resolve the type of an object using resolve_type" do
      obj = TestSchema.__absinthe_type__(:search_result)

      assert %Type.Object{name: "Person"} =
               Type.Union.resolve_type(obj, %{age: 12}, %{schema: TestSchema})

      assert %Type.Object{name: "Business"} =
               Type.Union.resolve_type(obj, %{employee_count: 12}, %{schema: TestSchema})
    end

    test "can resolve the type of an object using is_type_of" do
      obj = TestSchema.__absinthe_type__(:other_result)

      assert %Type.Object{name: "Foo"} =
               Type.Union.resolve_type(obj, %{name: "asdf"}, %{schema: TestSchema})
    end
  end
end
# Definition tests for object types: name/description, fields, and
# field arguments declared with the schema DSL.
defmodule Absinthe.Type.ObjectTest do
  use Absinthe.Case, async: true

  defmodule Schema do
    use Absinthe.Schema

    query do
      # Must exist
    end

    @desc "A person"
    object :person do
      description "A person"

      field :name, :string

      field :profile_picture, :string do
        arg :width, :integer
        arg :height, :integer
      end
    end
  end

  describe "object types" do
    test "can be defined" do
      # The object carries its name/description and appears in the type map
      person = Schema.__absinthe_type__(:person)
      assert %Absinthe.Type.Object{name: "Person", description: "A person"} = person
      assert %{person: "Person"} = Schema.__absinthe_types__()
    end

    test "can define fields" do
      person = Schema.__absinthe_type__(:person)
      assert %Absinthe.Type.Field{name: "name", type: :string} = person.fields.name
    end

    test "can define field arguments" do
      args = Schema.__absinthe_type__(:person).fields.profile_picture.args
      assert %Absinthe.Type.Argument{name: "width", type: :integer} = args.width
      assert %Absinthe.Type.Argument{name: "height", type: :integer} = args.height
    end
  end
end
# Serialization/parsing tests for the custom scalar types shipped in
# Absinthe.Type.Custom: :datetime, :naive_datetime, :date, :time, :decimal.
# Parsing goes through Blueprint input structs (Input.String etc.).
defmodule Absinthe.Type.CustomTest do
  use Absinthe.Case, async: true

  alias Absinthe.Type
  alias Absinthe.Blueprint.Input

  defmodule TestSchema do
    use Absinthe.Schema

    import_types Type.Custom

    query do
    end
  end

  # Reference values shared by the assertions below
  @datetime %DateTime{
    year: 2017,
    month: 1,
    day: 27,
    hour: 20,
    minute: 31,
    second: 55,
    time_zone: "Etc/UTC",
    zone_abbr: "UTC",
    utc_offset: 0,
    std_offset: 0
  }

  @naive_datetime ~N[2017-01-27 20:31:55]
  @date ~D[2017-01-27]
  @time ~T[20:31:55]
  @decimal Decimal.new("-3.49")
  @decimal_int Decimal.new("3")

  # Serialize `value` with the scalar registered under `type`.
  defp serialize(type, value) do
    TestSchema.__absinthe_type__(type)
    |> Type.Scalar.serialize(value)
  end

  # Parse `value` (a Blueprint input struct) with the scalar under `type`.
  defp parse(type, value) do
    TestSchema.__absinthe_type__(type)
    |> Type.Scalar.parse(value)
  end

  describe ":datetime" do
    test "serializes as an ISO8601 date and time string with UTC timezone marker" do
      assert "2017-01-27T20:31:55Z" == serialize(:datetime, @datetime)
    end

    test "can be parsed from an ISO8601 date and time string including timezone" do
      assert {:ok, @datetime} == parse(:datetime, %Input.String{value: "2017-01-27T20:31:55Z"})
      assert {:ok, @datetime} == parse(:datetime, %Input.String{value: "2017-01-27 20:31:55Z"})
    end

    test "can be parsed from an ISO8601 date and time string including zero UTC offset" do
      assert {:ok, @datetime} ==
               parse(:datetime, %Input.String{value: "2017-01-27T20:31:55+00:00"})
    end

    test "cannot be parsed when a non-zero UTC offset is included" do
      assert :error == parse(:datetime, %Input.String{value: "2017-01-27T20:31:55-02:30"})
      assert :error == parse(:datetime, %Input.String{value: "2017-01-27T20:31:55+04:00"})
    end

    test "cannot be parsed without UTC timezone marker" do
      assert :error == parse(:datetime, %Input.String{value: "2017-01-27T20:31:55"})
      assert :error == parse(:datetime, %Input.String{value: "2017-01-27 20:31:55"})
    end

    test "cannot be parsed when date or time is missing" do
      assert :error == parse(:datetime, %Input.String{value: "2017-01-27"})
      assert :error == parse(:datetime, %Input.String{value: "20:31:55"})
    end

    test "cannot be parsed from a binary not formatted according to ISO8601" do
      assert :error == parse(:datetime, %Input.String{value: "abc123"})
      assert :error == parse(:datetime, %Input.String{value: "01/25/2017 20:31:55"})
      assert :error == parse(:datetime, %Input.String{value: "2017-15-42T31:71:95Z"})
    end
  end

  describe ":naive_datetime" do
    test "serializes as an ISO8601 date and time string" do
      assert "2017-01-27T20:31:55" == serialize(:naive_datetime, @naive_datetime)
    end

    test "can be parsed from an ISO8601 date and time string" do
      # Timezone markers are accepted but discarded for naive datetimes
      assert {:ok, @naive_datetime} ==
               parse(:naive_datetime, %Input.String{value: "2017-01-27T20:31:55Z"})

      assert {:ok, @naive_datetime} ==
               parse(:naive_datetime, %Input.String{value: "2017-01-27 20:31:55Z"})

      assert {:ok, @naive_datetime} ==
               parse(:naive_datetime, %Input.String{value: "2017-01-27 20:31:55"})
    end

    test "cannot be parsed when date or time is missing" do
      assert :error == parse(:naive_datetime, %Input.String{value: "2017-01-27"})
      assert :error == parse(:naive_datetime, %Input.String{value: "20:31:55"})
    end

    test "cannot be parsed from a binary not formatted according to ISO8601" do
      assert :error == parse(:naive_datetime, %Input.String{value: "abc123"})
      assert :error == parse(:naive_datetime, %Input.String{value: "01/25/2017 20:31:55"})
      assert :error == parse(:naive_datetime, %Input.String{value: "2017-15-42T31:71:95"})
    end
  end

  describe ":date" do
    test "serializes as an ISO8601 date string" do
      assert "2017-01-27" == serialize(:date, @date)
    end

    test "can be parsed from an ISO8601 date string" do
      assert {:ok, @date} == parse(:date, %Input.String{value: "2017-01-27"})
    end

    test "cannot be parsed when time is included" do
      assert :error == parse(:date, %Input.String{value: "2017-01-27T20:31:55Z"})
      assert :error == parse(:date, %Input.String{value: "2017-01-27 20:31:55Z"})
      assert :error == parse(:date, %Input.String{value: "2017-01-27 20:31:55"})
    end

    test "cannot be parsed when date is missing" do
      assert :error == parse(:date, %Input.String{value: "20:31:55"})
    end

    test "cannot be parsed from a binary not formatted according to ISO8601" do
      assert :error == parse(:date, %Input.String{value: "abc123"})
      assert :error == parse(:date, %Input.String{value: "01/25/2017 20:31:55"})
      assert :error == parse(:date, %Input.String{value: "2017-15-42T31:71:95Z"})
    end
  end

  describe ":time" do
    test "serializes as an ISO8601 time string" do
      assert "20:31:55" == serialize(:time, @time)
    end

    test "can be parsed from an ISO8601 date string" do
      assert {:ok, @time} == parse(:time, %Input.String{value: "20:31:55"})
    end

    test "cannot be parsed when date is included" do
      assert :error == parse(:time, %Input.String{value: "2017-01-27T20:31:55Z"})
      assert :error == parse(:time, %Input.String{value: "2017-01-27 20:31:55Z"})
      assert :error == parse(:time, %Input.String{value: "2017-01-27 20:31:55"})
    end

    test "cannot be parsed when time is missing" do
      assert :error == parse(:time, %Input.String{value: "2017-01-27"})
    end

    test "cannot be parsed from a binary not formatted according to ISO8601" do
      assert :error == parse(:time, %Input.String{value: "abc123"})
      assert :error == parse(:time, %Input.String{value: "01/25/2017 20:31:55"})
      assert :error == parse(:time, %Input.String{value: "2017-15-42T31:71:95Z"})
    end
  end

  describe ":decimal" do
    test "serializes as a string" do
      assert "-3.49" == serialize(:decimal, @decimal)
      assert "3" == serialize(:decimal, @decimal_int)
    end

    test "can be parsed from a numeric string" do
      assert {:ok, decimal} = parse(:decimal, %Input.String{value: "-3.49"})
      # Decimal.cmp/2 compares values ignoring representation differences
      assert Decimal.cmp(@decimal, decimal) == :eq
    end

    test "can be parsed from a float" do
      assert {:ok, decimal} = parse(:decimal, %Input.Float{value: -3.49})
      assert Decimal.cmp(@decimal, decimal) == :eq
    end

    test "can be parsed from an integer" do
      assert {:ok, decimal} = parse(:decimal, %Input.Integer{value: 3})
      assert Decimal.cmp(@decimal_int, decimal) == :eq
    end

    test "cannot be parsed from alphanumeric string" do
      assert :error == parse(:decimal, %Input.String{value: "23.4 abc"})
    end
  end
end
# Enum type tests covering the three declaration styles: explicit values
# with `as:` mappings, values without explicit mappings, and the shorthand
# `values:` list of atoms.
defmodule Absinthe.Type.EnumTest do
  use Absinthe.Case, async: true

  alias Absinthe.Type

  defmodule TestSchema do
    use Absinthe.Schema

    query do
      field :channel, :color_channel, description: "The active color channel" do
        resolve fn _, _ ->
          {:ok, :red}
        end
      end
    end

    # Style 1: explicit internal values via `as:`, plus a deprecation
    enum :color_channel do
      description "The selected color channel"

      value :red, as: :r, description: "Color Red"
      value :green, as: :g, description: "Color Green"
      value :blue, as: :b, description: "Color Blue"

      value :alpha,
        as: :a,
        deprecate: "We no longer support opacity settings",
        description: "Alpha Channel"
    end

    # Style 2: values default to their own identifier when `as:` is omitted
    enum :color_channel2 do
      description "The selected color channel"

      value :red, description: "Color Red"
      value :green, description: "Color Green"
      value :blue, description: "Color Blue"

      value :alpha,
        as: :a,
        deprecate: "We no longer support opacity settings",
        description: "Alpha Channel"
    end

    # Style 3: shorthand list of atoms
    enum :color_channel3,
      values: [:red, :green, :blue, :alpha],
      description: "The selected color channel"
  end

  describe "enums" do
    test "can be defined by a map with defined values" do
      type = TestSchema.__absinthe_type__(:color_channel)
      assert %Type.Enum{} = type

      # External name "RED" maps to the internal value :r
      assert %Type.Enum.Value{name: "RED", value: :r, description: "Color Red"} =
               type.values[:red]
    end

    test "can be defined by a map without defined values" do
      type = TestSchema.__absinthe_type__(:color_channel2)
      assert %Type.Enum{} = type
      assert %Type.Enum.Value{name: "RED", value: :red} = type.values[:red]
    end

    test "can be defined by a shorthand list of atoms" do
      type = TestSchema.__absinthe_type__(:color_channel3)
      assert %Type.Enum{} = type
      assert %Type.Enum.Value{name: "RED", value: :red, description: nil} = type.values[:red]
    end
  end
end
# Tests for the built-in @skip and @include directives: lookup, behavior on
# fields, named fragments, and inline fragments (with and without type
# conditions), plus error reporting when the Boolean! argument is null.
defmodule Absinthe.Type.DirectiveTest do
  use Absinthe.Case, async: true

  alias Absinthe.Schema

  defmodule TestSchema do
    use Absinthe.Schema

    query do
      field :nonce, :string
    end
  end

  describe "directives" do
    test "are loaded as built-ins" do
      assert %{skip: "skip", include: "include"} = TestSchema.__absinthe_directives__()
      assert TestSchema.__absinthe_directive__(:skip)
      # Atom and string lookups resolve to the same directive
      assert TestSchema.__absinthe_directive__("skip") == TestSchema.__absinthe_directive__(:skip)

      assert Schema.lookup_directive(TestSchema, :skip) ==
               TestSchema.__absinthe_directive__(:skip)

      assert Schema.lookup_directive(TestSchema, "skip") ==
               TestSchema.__absinthe_directive__(:skip)
    end
  end

  describe "the `@skip` directive" do
    @query_field """
    query Test($skipPerson: Boolean) {
      person @skip(if: $skipPerson) {
        name
      }
    }
    """
    test "is defined" do
      assert Schema.lookup_directive(Absinthe.Fixtures.ContactSchema, :skip)
    end

    test "behaves as expected for a field" do
      assert {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} ==
               Absinthe.run(
                 @query_field,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipPerson" => false}
               )

      assert {:ok, %{data: %{}}} ==
               Absinthe.run(
                 @query_field,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipPerson" => true}
               )

      # `if` is Boolean!; omitting the variable is an error
      assert_result(
        {:ok,
         %{errors: [%{message: ~s(In argument "if": Expected type "Boolean!", found null.)}]}},
        run(@query_field, Absinthe.Fixtures.ContactSchema)
      )
    end

    @query_fragment """
    query Test($skipAge: Boolean) {
      person {
        name
        ...Aging @skip(if: $skipAge)
      }
    }
    fragment Aging on Person {
      age
    }
    """
    test "behaves as expected for a fragment" do
      assert_result(
        {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}},
        run(@query_fragment, Absinthe.Fixtures.ContactSchema, variables: %{"skipAge" => false})
      )

      assert_result(
        {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}},
        run(@query_fragment, Absinthe.Fixtures.ContactSchema, variables: %{"skipAge" => true})
      )

      assert_result(
        {:ok,
         %{errors: [%{message: ~s(In argument "if": Expected type "Boolean!", found null.)}]}},
        run(@query_fragment, Absinthe.Fixtures.ContactSchema)
      )
    end
  end

  describe "the `@include` directive" do
    @query_field """
    query Test($includePerson: Boolean) {
      person @include(if: $includePerson) {
        name
      }
    }
    """
    test "is defined" do
      assert Schema.lookup_directive(Absinthe.Fixtures.ContactSchema, :include)
    end

    test "behaves as expected for a field" do
      assert_result(
        {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}},
        run(@query_field, Absinthe.Fixtures.ContactSchema, variables: %{"includePerson" => true})
      )

      assert_result(
        {:ok, %{data: %{}}},
        run(@query_field, Absinthe.Fixtures.ContactSchema, variables: %{"includePerson" => false})
      )

      # Missing Boolean! argument also reports the source location
      assert_result(
        {:ok,
         %{
           errors: [
             %{
               locations: [%{column: 19, line: 2}],
               message: ~s(In argument "if": Expected type "Boolean!", found null.)
             }
           ]
         }},
        run(@query_field, Absinthe.Fixtures.ContactSchema)
      )
    end

    @query_fragment """
    query Test($includeAge: Boolean) {
      person {
        name
        ...Aging @include(if: $includeAge)
      }
    }
    fragment Aging on Person {
      age
    }
    """
    test "behaves as expected for a fragment" do
      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(
                 @query_fragment,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"includeAge" => true}
               )

      assert {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} ==
               Absinthe.run(
                 @query_fragment,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"includeAge" => false}
               )
    end

    test "should return an error if the variable is not supplied" do
      assert {:ok, %{errors: errors}} =
               Absinthe.run(@query_fragment, Absinthe.Fixtures.ContactSchema)

      assert [] != errors
    end
  end

  describe "for inline fragments without type conditions" do
    @query """
    query Q($skipAge: Boolean = false) {
      person {
        name
        ... @skip(if: $skipAge) {
          age
        }
      }
    }
    """
    test "works as expected" do
      assert {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} ==
               Absinthe.run(
                 @query,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipAge" => true}
               )

      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(
                 @query,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipAge" => false}
               )

      # With no variables, the $skipAge default (false) applies
      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(@query, Absinthe.Fixtures.ContactSchema)
    end
  end

  describe "for inline fragments with type conditions" do
    @query """
    query Q($skipAge: Boolean = false) {
      person {
        name
        ... on Person @skip(if: $skipAge) {
          age
        }
      }
    }
    """
    test "works as expected" do
      assert {:ok, %{data: %{"person" => %{"name" => "Bruce"}}}} ==
               Absinthe.run(
                 @query,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipAge" => true}
               )

      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(
                 @query,
                 Absinthe.Fixtures.ContactSchema,
                 variables: %{"skipAge" => false}
               )

      assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
               Absinthe.run(@query, Absinthe.Fixtures.ContactSchema)
    end
  end
end
# Deprecation tests: fields and arguments can be deprecated with an explicit
# reason string or with `deprecate: true` (reason nil), via keyword options
# or the block-level `deprecate` macro.
defmodule Absinthe.Type.DeprecationTest do
  use Absinthe.Case, async: true

  alias Absinthe.Type

  defmodule TestSchema do
    use Absinthe.Schema

    query do
      # Query type must exist
    end

    object :profile do
      description "A profile"

      field :name, :string

      field :profile_picture,
        type: :string,
        args: [
          width: [type: :integer],
          height: [type: :integer],
          # deprecated with a reason vs. deprecated without one
          size: [type: :string, deprecate: "Not explicit enough"],
          source: [type: :string, deprecate: true]
        ]

      # Block form of field deprecation
      field :email_address, :string do
        deprecate "privacy"
      end

      # Keyword form, no reason
      field :address, :string, deprecate: true
    end
  end

  describe "fields" do
    test "can be deprecated" do
      obj = TestSchema.__absinthe_type__(:profile)
      assert Type.deprecated?(obj.fields.email_address)
      assert "privacy" == obj.fields.email_address.deprecation.reason
      assert Type.deprecated?(obj.fields.address)
      # `deprecate: true` produces a deprecation with a nil reason
      assert nil == obj.fields.address.deprecation.reason
    end
  end

  describe "arguments" do
    test "can be deprecated" do
      field = TestSchema.__absinthe_type__(:profile).fields.profile_picture
      assert Type.deprecated?(field.args.size)
      assert "Not explicit enough" == field.args.size.deprecation.reason
      assert Type.deprecated?(field.args.source)
      assert nil == field.args.source.deprecation.reason
    end
  end
end
# Placeholder test module for Absinthe.Resolution.Projector; no test cases
# have been implemented yet (the sketch below is commented out).
defmodule Absinthe.Resolution.ProjectorTest do
  use ExUnit.Case, async: true

  # describe "merging" do
  #   test "asdf"
  # end
end
# Middleware tests: per-field middleware modules and functions, the
# schema-level `middleware/3` callback (applied per parent object), context
# mutation from middleware, and Absinthe.Resolution.path_string/1.
defmodule Absinthe.MiddlewareTest do
  use Absinthe.Case, async: true

  # Module-based middleware: errors unless the context has a current user.
  defmodule Auth do
    def call(res, _) do
      case res.context do
        %{current_user: _} ->
          res

        _ ->
          res
          |> Absinthe.Resolution.put_result({:error, "unauthorized"})
      end
    end
  end

  defmodule Schema do
    use Absinthe.Schema

    alias Absinthe.MiddlewareTest

    # Schema-level callback: every field on :secret_object gets auth prepended.
    def middleware(middleware, _field, %Absinthe.Type.Object{identifier: :secret_object}) do
      # can't inline due to Elixir bug.
      fun = &auth/2
      [fun | middleware]
    end

    # All other fields keep their middleware unchanged.
    def middleware(middleware, _field, _) do
      middleware
    end

    # Same check as the Auth module, as a plain function for use above.
    def auth(res, _) do
      case res.context do
        %{current_user: _} ->
          res

        _ ->
          res
          |> Absinthe.Resolution.put_result({:error, "unauthorized"})
      end
    end

    query do
      field :authenticated, :user do
        middleware MiddlewareTest.Auth

        resolve fn _, _, _ ->
          {:ok, %{name: "bob"}}
        end
      end

      field :public, :user do
        resolve fn _, _, _ ->
          {:ok, %{name: "bob", email: "secret"}}
        end
      end

      field :returns_private_object, :secret_object do
        resolve fn _, _, _ ->
          {:ok, %{key: "value"}}
        end
      end

      field :from_context, :string do
        # Middleware may replace the context before the resolver runs
        middleware fn res, _ ->
          %{res | context: %{value: "yooooo"}}
        end

        resolve fn _, %{context: context} ->
          {:ok, context.value}
        end
      end

      field :path, :path do
        resolve fn _, _ -> {:ok, %{}} end
      end
    end

    # Self-referential object used to test path_string/1 on nested fields
    object :path do
      field :path, :path, resolve: fn _, _ -> {:ok, %{}} end

      field :result, list_of(:string) do
        resolve fn _, info ->
          {:ok, Absinthe.Resolution.path_string(info)}
        end
      end
    end

    # keys in this object are made secret via the def middleware callback
    object :secret_object do
      field :key, :string
      field :key2, :string
    end

    object :user do
      field :email, :string do
        middleware MiddlewareTest.Auth
        middleware Absinthe.Middleware.MapGet, :email

        middleware fn res, _ ->
          # no-op, mostly making sure this form works
          res
        end
      end

      field :name, :string
    end
  end

  test "fails with authorization error when no current user" do
    doc = """
    {authenticated { name }}
    """

    assert {:ok, %{errors: errors}} = Absinthe.run(doc, __MODULE__.Schema)

    assert [
             %{
               locations: [%{column: 2, line: 1}],
               message: "unauthorized",
               path: ["authenticated"]
             }
           ] == errors
  end

  test "email fails with authorization error when no current user" do
    doc = """
    {public { name email }}
    """

    assert {:ok, %{errors: errors}} = Absinthe.run(doc, __MODULE__.Schema)

    assert [
             %{
               locations: [%{column: 16, line: 1}],
               message: "unauthorized",
               path: ["public", "email"]
             }
           ] == errors
  end

  test "email works when current user" do
    doc = """
    {public { name email }}
    """

    assert {:ok, %{data: data}} =
             Absinthe.run(doc, __MODULE__.Schema, context: %{current_user: %{}})

    assert %{"public" => %{"email" => "secret", "name" => "bob"}} == data
  end

  test "secret object cant be accessed without a current user" do
    doc = """
    {returnsPrivateObject { key }}
    """

    assert {:ok, %{errors: errors}} = Absinthe.run(doc, __MODULE__.Schema)

    assert [
             %{
               locations: [%{column: 25, line: 1}],
               message: "unauthorized",
               path: ["returnsPrivateObject", "key"]
             }
           ] == errors
  end

  test "secret object can be accessed with a current user" do
    doc = """
    {returnsPrivateObject { key }}
    """

    assert {:ok, %{data: %{"returnsPrivateObject" => %{"key" => "value"}}}} ==
             Absinthe.run(doc, __MODULE__.Schema, context: %{current_user: %{}})
  end

  test "it can modify the context" do
    doc = """
    {fromContext}
    """

    assert {:ok, %{data: data}} =
             Absinthe.run(doc, __MODULE__.Schema, context: %{current_user: %{}})

    assert %{"fromContext" => "yooooo"} == data
  end

  test "it gets the path of the current field" do
    doc = """
    {foo: path { bar: path { result }}}
    """

    assert {:ok, %{data: data}} =
             Absinthe.run(doc, __MODULE__.Schema, context: %{current_user: %{}})

    # path_string returns leaf-to-root, alias names included
    assert %{"foo" => %{"bar" => %{"result" => ["result", "bar", "foo", "RootQueryType"]}}} ==
             data
  end
end
# Tests for Absinthe.Resolution.project/1: the set of child fields projected
# for the current resolution, including fields merged in from fragments.
defmodule Absinthe.ResolutionTest do
  use ExUnit.Case, async: true

  defmodule Schema do
    use Absinthe.Schema

    interface :named do
      field :name, :string
      resolve_type fn _, _ -> :user end
    end

    object :user do
      interface :named
      field :id, :id
      field :name, :string
    end

    query do
      field :user, :user do
        resolve fn _, info ->
          fields = Absinthe.Resolution.project(info) |> Enum.map(& &1.name)
          # escape hatch: report the projected field names back to the test
          # process, since the resolver has no other channel to the test
          send(self(), {:fields, fields})
          {:ok, nil}
        end
      end
    end
  end

  test "project/1 works" do
    doc = """
    { user { id, name } }
    """

    {:ok, _} = Absinthe.run(doc, Schema)
    assert_receive({:fields, fields})
    assert ["id", "name"] == fields
  end

  test "project/1 works with fragments and things" do
    # Fields selected through inline fragments (on the object and on its
    # interface) must be merged into the projection
    doc = """
    {
      user {
        ... on User {
          id
        }
        ... on Named {
          name
        }
      }
    }
    """

    {:ok, _} = Absinthe.run(doc, Schema)
    assert_receive({:fields, fields})
    assert ["id", "name"] == fields
  end
end
defmodule Absinthe.Execution.InlineFragmentsTest do
  # Execution of inline fragments (`... on Type`) against concrete and
  # interface-typed fields. Note: each `@query` attribute applies to the
  # test(s) that follow it, so attribute/test ordering matters here.
  use Absinthe.Case, async: true

  @query """
  {
    person {
      name
      ... on Person {
        age
      }
    }
  }
  """
  test "adds fields in a simple case" do
    assert {:ok, %{data: %{"person" => %{"name" => "Bruce", "age" => 35}}}} ==
             Absinthe.run(@query, Absinthe.Fixtures.ContactSchema)
  end

  @query """
  query Q($business: Boolean = false) {
    contact(business: $business) {
      entity {
        name
        ... on Person {
          age
        }
        ... on Business {
          employeeCount
        }
      }
    }
  }
  """
  test "adds fields in an interface query based on a type" do
    # entity resolves to a Person, so only the Person fragment applies
    assert {:ok, %{data: %{"contact" => %{"entity" => %{"name" => "Bruce", "age" => 35}}}}} ==
             run(@query, Absinthe.Fixtures.ContactSchema, variables: %{"business" => false})
  end

  test "adds fields in an interface query based on another type" do
    # same document; entity now resolves to a Business
    assert {:ok,
            %{
              data: %{"contact" => %{"entity" => %{"name" => "Someplace", "employeeCount" => 11}}}
            }} == run(@query, Absinthe.Fixtures.ContactSchema, variables: %{"business" => true})
  end
end
defmodule Absinthe.Execution.SubscriptionTest do
  # End-to-end subscription tests: subscribing a process, publishing values,
  # mutation-driven triggers, error handling, and batching across documents.
  # Not async: uses a globally named Registry and subscription supervisor
  # (see setup_all).
  use ExUnit.Case

  import ExUnit.CaptureLog

  # Minimal local Pubsub backed by a Registry: subscribed test processes
  # receive published results as `{:broadcast, message}` messages.
  defmodule PubSub do
    @behaviour Absinthe.Subscription.Pubsub

    def start_link() do
      Registry.start_link(keys: :unique, name: __MODULE__)
    end

    def subscribe(topic) do
      Registry.register(__MODULE__, topic, [])
      :ok
    end

    def publish_subscription(topic, data) do
      message = %{
        topic: topic,
        event: "subscription:data",
        result: data
      }

      Registry.dispatch(__MODULE__, topic, fn entries ->
        for {pid, _} <- entries, do: send(pid, {:broadcast, message})
      end)
    end

    def publish_mutation(_proxy_topic, _mutation_result, _subscribed_fields) do
      # this pubsub is local and doesn't support clusters
      :ok
    end
  end

  defmodule Schema do
    use Absinthe.Schema

    query do
      # Query type must exist
    end

    object :user do
      field :id, :id
      field :name, :string

      field :group, :group do
        # Batched resolver; the batch key includes the test pid so
        # batch_get_group/2 can report each invocation back to the test.
        resolve fn user, _, %{context: %{test_pid: pid}} ->
          batch({__MODULE__, :batch_get_group, pid}, nil, fn _results ->
            {:ok, user.group}
          end)
        end
      end
    end

    object :group do
      field :name, :string
    end

    def batch_get_group(test_pid, _) do
      # send a message to the test process every time we access this function.
      # if batching is working properly, it should only happen once.
      send(test_pid, :batch_get_group)
      %{}
    end

    subscription do
      # Always raises on resolution; used to prove one crashing document
      # doesn't break publishing to other documents.
      field :raises, :string do
        config fn _, _ ->
          {:ok, topic: "*"}
        end

        resolve fn _, _, _ ->
          raise "boom"
        end
      end

      field :user, :user do
        arg :id, :id

        config fn args, _ ->
          {:ok, topic: args[:id] || "*"}
        end

        # Mutation trigger: publishing happens automatically when the
        # :update_user mutation runs, on both the user's id and "*" topics.
        trigger :update_user,
          topic: fn user ->
            [user.id, "*"]
          end
      end

      field :thing, :string do
        arg :client_id, non_null(:id)

        # config/2 can reject a subscription by returning an error tuple.
        config fn
          _args, %{context: %{authorized: false}} ->
            {:error, "unauthorized"}

          args, _ ->
            {
              :ok,
              topic: args.client_id
            }
        end
      end
    end

    mutation do
      field :update_user, :user do
        arg :id, non_null(:id)

        resolve fn _, %{id: id}, _ ->
          {:ok, %{id: id, name: "foo"}}
        end
      end
    end
  end

  setup_all do
    {:ok, _} = PubSub.start_link()
    {:ok, _} = Absinthe.Subscription.start_link(PubSub)
    :ok
  end

  @query """
  subscription ($clientId: ID!) {
    thing(clientId: $clientId)
  }
  """
  test "can subscribe the current process" do
    client_id = "abc"

    assert {:ok, %{"subscribed" => topic}} =
             run(
               @query,
               Schema,
               variables: %{"clientId" => client_id},
               context: %{pubsub: PubSub}
             )

    Absinthe.Subscription.publish(PubSub, "foo", thing: client_id)

    assert_receive({:broadcast, msg})

    assert %{
             event: "subscription:data",
             result: %{data: %{"thing" => "foo"}},
             topic: topic
           } == msg
  end

  @query """
  subscription ($clientId: ID!) {
    thing(clientId: $clientId, extra: 1)
  }
  """
  test "can return errors properly" do
    assert {
             :ok,
             %{
               errors: [
                 %{
                   locations: [%{column: 30, line: 2}],
                   message:
                     "Unknown argument \"extra\" on field \"thing\" of type \"RootSubscriptionType\"."
                 }
               ]
             }
           } == run(@query, Schema, variables: %{"clientId" => "abc"}, context: %{pubsub: PubSub})
  end

  @query """
  subscription ($userId: ID!) {
    user(id: $userId) { id name }
  }
  """
  test "subscription triggers work" do
    id = "1"

    assert {:ok, %{"subscribed" => topic}} =
             run(
               @query,
               Schema,
               variables: %{"userId" => id},
               context: %{pubsub: PubSub}
             )

    mutation = """
    mutation ($userId: ID!) {
      updateUser(id: $userId) { id name }
    }
    """

    # Running the mutation should publish via the :update_user trigger.
    assert {:ok, %{data: _}} =
             run(mutation, Schema,
               variables: %{"userId" => id},
               context: %{pubsub: PubSub}
             )

    assert_receive({:broadcast, msg})

    assert %{
             event: "subscription:data",
             result: %{data: %{"user" => %{"id" => "1", "name" => "foo"}}},
             topic: topic
           } == msg
  end

  @query """
  subscription ($clientId: ID!) {
    thing(clientId: $clientId)
  }
  """
  test "can return an error tuple from the topic function" do
    assert {:ok, %{errors: [%{locations: [%{column: 3, line: 2}], message: "unauthorized"}]}} ==
             run(
               @query,
               Schema,
               variables: %{"clientId" => "abc"},
               context: %{pubsub: PubSub, authorized: false}
             )
  end

  @query """
  subscription ($clientId: ID!) {
    thing(clientId: $clientId)
  }
  """
  test "stringifies topics" do
    # Subscribed on "1" (string); published with integer 1 — topics match.
    assert {:ok, %{"subscribed" => topic}} =
             run(@query, Schema, variables: %{"clientId" => "1"}, context: %{pubsub: PubSub})

    Absinthe.Subscription.publish(PubSub, "foo", thing: 1)

    assert_receive({:broadcast, msg})

    assert %{
             event: "subscription:data",
             result: %{data: %{"thing" => "foo"}},
             topic: topic
           } == msg
  end

  test "isn't tripped up if one of the subscription docs raises" do
    assert {:ok, %{"subscribed" => _}} = run("subscription { raises }", Schema)
    assert {:ok, %{"subscribed" => topic}} = run("subscription { thing(clientId: \"*\")}", Schema)

    error_log =
      capture_log(fn ->
        Absinthe.Subscription.publish(PubSub, "foo", raises: "*", thing: "*")

        assert_receive({:broadcast, msg})

        assert %{
                 event: "subscription:data",
                 result: %{data: %{"thing" => "foo"}},
                 topic: topic
               } == msg
      end)

    # The crash from the :raises document is logged but doesn't stop delivery.
    assert String.contains?(error_log, "boom")
  end

  test "different subscription docs are batched together" do
    opts = [context: %{test_pid: self()}]

    assert {:ok, %{"subscribed" => doc1}} =
             run("subscription { user { group { name } id} }", Schema, opts)

    # different docs required for test, otherwise they get deduplicated from the start
    assert {:ok, %{"subscribed" => doc2}} =
             run("subscription { user { group { name } id name} }", Schema, opts)

    user = %{id: "1", name: "Alicia", group: %{name: "Elixir Users"}}

    Absinthe.Subscription.publish(PubSub, user, user: ["*", user.id])

    assert_receive({:broadcast, %{topic: ^doc1, result: %{data: _}}})
    assert_receive({:broadcast, %{topic: ^doc2, result: %{data: %{"user" => user}}}})

    # NOTE: `user` is rebound by the assert_receive above — it is now the
    # string-keyed result map, not the atom-keyed map published earlier.
    assert user["group"]["name"] == "Elixir Users"

    # we should get this just once due to batching
    assert_receive(:batch_get_group)
    refute_receive(:batch_get_group)
  end

  test "subscription docs with different contexts don't leak context" do
    ctx1 = %{test_pid: self(), user: 1}

    assert {:ok, %{"subscribed" => doc1}} =
             run("subscription { user { group { name } id} }", Schema, context: ctx1)

    ctx2 = %{test_pid: self(), user: 2}

    # different docs required for test, otherwise they get deduplicated from the start
    assert {:ok, %{"subscribed" => doc2}} =
             run("subscription { user { group { name } id name} }", Schema, context: ctx2)

    user = %{id: "1", name: "Alicia", group: %{name: "Elixir Users"}}

    Absinthe.Subscription.publish(PubSub, user, user: ["*", user.id])

    assert_receive({:broadcast, %{topic: ^doc1, result: %{data: _}}})
    assert_receive({:broadcast, %{topic: ^doc2, result: %{data: %{"user" => user}}}})

    # `user` rebound to the string-keyed result map, as in the test above.
    assert user["group"]["name"] == "Elixir Users"

    # we should get this twice since the different contexts prevent batching.
    assert_receive(:batch_get_group)
    assert_receive(:batch_get_group)
  end

  # Runs a document and, when it is a subscription, subscribes the test
  # process to the returned topic; PubSub is always merged into the context.
  defp run(query, schema, opts \\ []) do
    opts = Keyword.update(opts, :context, %{pubsub: PubSub}, &Map.put(&1, :pubsub, PubSub))

    case Absinthe.run(query, schema, opts) do
      {:ok, %{"subscribed" => topic}} = val ->
        PubSub.subscribe(topic)
        val

      val ->
        val
    end
  end
end
defmodule Absinthe.Execution.ListTest.Schema do
  # Fixture schema for list-resolution tests: list fields of scalars,
  # objects, and nested lists up to four levels deep.
  use Absinthe.Schema

  object :item do
    field :categories, list_of(:string)
  end

  object :book do
    field :name, :string
  end

  query do
    field :numbers, list_of(:integer), resolve: fn _, _ -> {:ok, [1, 2, 3]} end

    field :categories, list_of(:string) do
      resolve fn _, _ ->
        {:ok, ["foo", "bar", "baz"]}
      end
    end

    field :items, list_of(:item) do
      resolve fn _, _ ->
        items = [
          %{categories: ["foo", "bar"]},
          %{categories: ["baz", "buz"]}
        ]

        {:ok, items}
      end
    end

    field :list_of_list_of_numbers, list_of(list_of(:integer)) do
      resolve fn _, _ -> {:ok, [[1, 2, 3], [4, 5, 6]]} end
    end

    # Four levels of list nesting.
    field :big_nesting_of_numbers, list_of(list_of(list_of(list_of(:integer)))) do
      resolve fn _, _ ->
        list = [
          [
            [
              [1, 2, 3],
              [4, 5, 6]
            ],
            [
              [7, 8, 9]
            ],
            [
              [10, 11, 12]
            ]
          ]
        ]

        {:ok, list}
      end
    end

    field :list_of_list_of_books, list_of(list_of(:book)) do
      resolve fn _, _ ->
        books = [
          [
            %{name: "foo"},
            %{name: "bar"}
          ],
          [
            %{name: "baz"}
          ]
        ]

        {:ok, books}
      end
    end

    field :list_of_list_of_items, list_of(list_of(:item)) do
      resolve fn _, _ ->
        items = [
          [
            %{categories: ["foo", "bar"]},
            %{categories: ["baz", "buz"]}
          ],
          [
            %{categories: ["blip", "blop"]}
          ]
        ]

        {:ok, items}
      end
    end
  end
end
defmodule Absinthe.Execution.ListTest do
  # Execution tests for list-typed fields, against the fixture schema
  # defined in __MODULE__.Schema above. Each `@query` attribute applies to
  # the test immediately following it.
  use Absinthe.Case, async: true

  @query """
  {
    categories
  }
  """
  test "should resolve list of strings" do
    assert {:ok, %{data: %{"categories" => ["foo", "bar", "baz"]}}} ==
             Absinthe.run(@query, __MODULE__.Schema)
  end

  @query """
  {
    numbers
  }
  """
  test "should resolve list of numbers" do
    assert {:ok, %{data: %{"numbers" => [1, 2, 3]}}} == Absinthe.run(@query, __MODULE__.Schema)
  end

  @query """
  {
    items {
      categories
    }
  }
  """
  test "should resolve list of objects with a list of scalars inside" do
    assert {:ok,
            %{
              data: %{
                "items" => [%{"categories" => ["foo", "bar"]}, %{"categories" => ["baz", "buz"]}]
              }
            }} == Absinthe.run(@query, __MODULE__.Schema)
  end

  @query """
  {
    listOfListOfNumbers
  }
  """
  test "should resolve list of list of numbers" do
    assert {:ok, %{data: %{"listOfListOfNumbers" => [[1, 2, 3], [4, 5, 6]]}}} ==
             Absinthe.run(@query, __MODULE__.Schema)
  end

  @query """
  {
    bigNestingOfNumbers
  }
  """
  test "should resolve list of lists of... numbers with a depth of 4" do
    list = [
      [
        [
          [1, 2, 3],
          [4, 5, 6]
        ],
        [
          [7, 8, 9]
        ],
        [
          [10, 11, 12]
        ]
      ]
    ]

    assert {:ok, %{data: %{"bigNestingOfNumbers" => list}}} ==
             Absinthe.run(@query, __MODULE__.Schema)
  end

  @query """
  {
    listOfListOfBooks {
      name
    }
  }
  """
  test "should resolve list of list of books" do
    books = [
      [
        %{"name" => "foo"},
        %{"name" => "bar"}
      ],
      [
        %{"name" => "baz"}
      ]
    ]

    assert {:ok, %{data: %{"listOfListOfBooks" => books}}} ==
             Absinthe.run(@query, __MODULE__.Schema)
  end

  @query """
  {
    listOfListOfItems {
      categories
    }
  }
  """
  test "should resolve list of list of items" do
    items = [
      [
        %{"categories" => ["foo", "bar"]},
        %{"categories" => ["baz", "buz"]}
      ],
      [
        %{"categories" => ["blip", "blop"]}
      ]
    ]

    assert {:ok, %{data: %{"listOfListOfItems" => items}}} ==
             Absinthe.run(@query, __MODULE__.Schema)
  end
end
defmodule Absinthe.Execution.FragmentSpreadTest do
  # Named fragment spreads: spreading through abstract (interface/union)
  # types, and the error cases for unknown types and unknown fragments.
  use Absinthe.Case, async: true

  @query """
  query AbstractFragmentSpread {
    firstSearchResult {
      ...F0
    }
  }
  fragment F0 on SearchResult {
    ...F1
    __typename
  }
  fragment F1 on Person {
    age
  }
  """
  test "spreads fragments with abstract target" do
    # F0 targets the abstract SearchResult; F1 only applies because the
    # concrete result is a Person.
    assert {:ok, %{data: %{"firstSearchResult" => %{"__typename" => "Person", "age" => 35}}}} ==
             Absinthe.run(@query, Absinthe.Fixtures.ContactSchema)
  end

  test "spreads errors fragments that don't refer to a real type" do
    query = """
    query {
      __typename
    }
    fragment F0 on Foo {
      name
    }
    """

    assert {:ok,
            %{errors: [%{locations: [%{column: 1, line: 4}], message: "Unknown type \"Foo\"."}]}} ==
             Absinthe.run(query, Absinthe.Fixtures.ContactSchema)
  end

  test "errors properly when spreading fragments that don't exist" do
    query = """
    query {
      __typename
      ... NonExistentFragment
    }
    """

    assert {:ok,
            %{
              errors: [
                %{
                  locations: [%{column: 3, line: 3}],
                  message: "Unknown fragment \"NonExistentFragment\""
                }
              ]
            }} == Absinthe.run(query, Absinthe.Fixtures.ContactSchema)
  end
end
defmodule Absinthe.Execution.ArgumentsTest do
  # General argument-handling behavior: nullable omission, non-null
  # enforcement, and adaptation of internal (snake_case) field names in
  # error messages.
  use Absinthe.Case, async: true

  @schema Absinthe.Fixtures.ArgumentsSchema

  @graphql """
  query {
    contact
  }
  """
  test "when nullable, if omitted should still be passed as an argument map to the resolver" do
    assert_data(%{"contact" => nil}, run(@graphql, @schema))
  end

  @graphql """
  query {
    requiredThing
  }
  """
  test "when non-nullable and missing, returns an appropriate error" do
    assert_error_message(
      ~s(In argument "name": Expected type "InputName!", found null.),
      run(@graphql, @schema)
    )
  end

  describe "errors" do
    @graphql """
    query FindUser {
      user(contact: {email: "[email protected]", contactType: 1})
    }
    """
    test "should adapt internal field names on error" do
      # `contact_type` is reported in its external camelCase form.
      assert_error_message_lines(
        [
          ~s(Argument "contact" has invalid value {email: "[email protected]", contactType: 1}.),
          ~s(In field "contactType": Expected type "ContactType", found 1.)
        ],
        run(@graphql, @schema)
      )
    end
  end
end
defmodule Absinthe.Execution.DefaultResolverTest do
  # The built-in default resolver looks up atom keys only; a schema can
  # replace it (via middleware/3) to also look up string keys.
  use Absinthe.Case, async: true

  # Root value with one atom key and one string key.
  @root %{:foo => "baz", "bar" => "quux"}
  @query "{ foo bar }"

  describe "without a custom default resolver defined" do
    defmodule NormalSchema do
      use Absinthe.Schema

      query do
        field :foo, :string
        field :bar, :string
      end
    end

    test "should resolve using atoms" do
      # "bar" is only present as a string key, so the default resolver
      # misses it and returns nil.
      assert {:ok, %{data: %{"foo" => "baz", "bar" => nil}}} ==
               Absinthe.run(@query, NormalSchema, root_value: @root)
    end
  end

  describe "with a custom default resolver defined" do
    defmodule CustomSchema do
      use Absinthe.Schema

      query do
        field :foo, :string
        field :bar, :string
      end

      # Replace the default middleware with a resolver that tries the
      # external (string) field name first, then the internal atom
      # identifier, falling back to nil.
      def middleware(middleware, %{name: name, identifier: identifier} = field, obj) do
        middleware_spec =
          Absinthe.Resolution.resolver_spec(fn parent, _, _ ->
            case parent do
              %{^name => value} -> {:ok, value}
              %{^identifier => value} -> {:ok, value}
              _ -> {:ok, nil}
            end
          end)

        Absinthe.Schema.replace_default(middleware, middleware_spec, field, obj)
      end

      def middleware(middleware, _, _) do
        middleware
      end
    end

    test "should resolve using as defined" do
      assert {:ok, %{data: %{"foo" => "baz", "bar" => "quux"}}} ==
               Absinthe.run(@query, CustomSchema, root_value: @root)
    end
  end
end
defmodule Absinthe.Execution.Arguments.BooleanTest do
  # Boolean arguments: variables, schema defaults, literals, and the error
  # produced when a non-boolean value is supplied.
  use Absinthe.Case, async: true

  @schema Absinthe.Fixtures.ArgumentsSchema

  @graphql """
  query ($flag: Boolean!) {
    something(flag: $flag)
  }
  """
  test "variables are passed as arguments to resolution functions correctly" do
    assert_data(%{"something" => "YES"}, run(@graphql, @schema, variables: %{"flag" => true}))
    assert_data(%{"something" => "NO"}, run(@graphql, @schema, variables: %{"flag" => false}))
  end

  @graphql """
  query ($flag: Boolean) {
    something(flag: $flag)
  }
  """
  test "if a variable is not provided schema default value is used" do
    # the schema's default for :flag is false, hence "NO"
    assert_data(%{"something" => "NO"}, run(@graphql, @schema))
  end

  test "literals are passed as arguments to resolution functions correctly" do
    assert_data(%{"something" => "YES"}, run(~s<query { something(flag: true) }>, @schema))
    assert_data(%{"something" => "NO"}, run(~s<query { something(flag: false) }>, @schema))
    assert_data(%{"something" => "NO"}, run(~s<query { something }>, @schema))
  end

  @graphql """
  query {
    something(flag: {foo: 1})
  }
  """
  test "returns a correct error when passed the wrong type" do
    assert_error_message_lines(
      [
        ~s(Argument "flag" has invalid value {foo: 1}.),
        ~s(In field \"foo\": Unknown field.)
      ],
      run(@graphql, @schema)
    )
  end
end
defmodule Absinthe.Execution.Arguments.InputObjectTest do
  # Input-object arguments: supplied via variables, inner variables,
  # literals; default values; and non-null field enforcement with the
  # detailed, multi-line error messages Absinthe produces.
  use Absinthe.Case, async: true

  @schema Absinthe.Fixtures.ArgumentsSchema

  @graphql """
  query ($contact: ContactInput!) {
    user(contact: $contact)
  }
  """
  test "as variable, should work when nested" do
    assert_data(
      %{"user" => "[email protected]"},
      run(
        @graphql,
        @schema,
        variables: %{"contact" => %{"email" => "[email protected]", "contactType" => "Email"}}
      )
    )
  end

  @graphql """
  query ($contact: ContactInput!) {
    user(contact: $contact)
  }
  """
  test "using variables, works in a basic case" do
    assert_data(
      %{"user" => "[email protected]"},
      run(@graphql, @schema, variables: %{"contact" => %{"email" => "[email protected]"}})
    )
  end

  @graphql """
  query ($email: String) {
    contacts(contacts: [{email: $email}, {email: $email}])
  }
  """
  test "using inner variables" do
    # variables referenced inside a literal input-object list
    assert_data(
      %{"contacts" => ["[email protected]", "[email protected]"]},
      run(@graphql, @schema, variables: %{"email" => "[email protected]"})
    )
  end

  @graphql """
  query ($input: InputStuff!) {
    stuff(stuff: $input)
  }
  """
  test "enforces non_null fields in input passed as variable" do
    # explicit nil and outright omission are rejected identically
    assert_error_message_lines(
      [
        ~s(Argument "stuff" has invalid value $input.),
        ~s(In field "nonNullField": Expected type "String!", found null.)
      ],
      run(@graphql, @schema, variables: %{"input" => %{"value" => 5, "nonNullField" => nil}})
    )

    assert_error_message_lines(
      [
        ~s(Argument "stuff" has invalid value $input.),
        ~s(In field "nonNullField": Expected type "String!", found null.)
      ],
      run(@graphql, @schema, variables: %{"input" => %{"value" => 5}})
    )
  end

  @graphql """
  query ($email: String, $defaultWithString: String) {
    user(contact: {email: $email, defaultWithString: $defaultWithString})
  }
  """
  test "can set field default values" do
    assert_data(
      %{"user" => "[email protected]"},
      run(@graphql, @schema, variables: %{"email" => "[email protected]"})
    )
  end

  @graphql """
  query ($email: String) {
    contacts(contacts: [{email: $email}, {email: $email}])
  }
  """
  test "with inner variables, when no variables are given, returns an error" do
    # the unbound $email violates the non-null :email field in each element
    assert_error_message_lines(
      [
        ~s(Argument "contacts" has invalid value [{email: $email}, {email: $email}].),
        ~s(In element #1: Expected type "ContactInput", found {email: $email}.),
        ~s(In field "email": Expected type "String!", found $email.),
        ~s(In element #2: Expected type "ContactInput", found {email: $email}.),
        ~s(In field "email": Expected type "String!", found $email.)
      ],
      run(@graphql, @schema, variables: %{})
    )
  end

  @graphql """
  query {
    user(contact: {email: "[email protected]"})
  }
  """
  test "using literals, works in a basic case" do
    assert_data(%{"user" => "[email protected]"}, run(@graphql, @schema))
  end

  @graphql """
  query {
    testBooleanInputObject(input: {flag: false})
  }
  """
  test "works with inner booleans set to false" do
    # This makes sure we don't accidentally filter out booleans when trying
    # to filter out nils
    assert_data(%{"testBooleanInputObject" => false}, run(@graphql, @schema))
  end

  @graphql """
  query {
    user(contact: {email: "[email protected]", nestedContactInput: {email: "foo"}})
  }
  """
  test "works in a nested case" do
    assert_data(%{"user" => "[email protected]"}, run(@graphql, @schema))
  end

  @graphql """
  query {
    user(contact: {foo: "buz"})
  }
  """
  test "returns the correct error if an inner field is marked non null but is missing" do
    assert_error_message_lines(
      [
        ~s(Argument "contact" has invalid value {foo: "buz"}.),
        ~s(In field "email": Expected type "String!", found null.),
        ~s(In field "foo": Unknown field.)
      ],
      run(@graphql, @schema)
    )
  end

  @graphql """
  query {
    user(contact: {email: "bubba", foo: "buz"})
  }
  """
  test "returns an error if extra fields are given" do
    assert_error_message_lines(
      [
        ~s(Argument "contact" has invalid value {email: "bubba", foo: "buz"}.),
        ~s(In field "foo": Unknown field.)
      ],
      run(@graphql, @schema)
    )
  end
end
defmodule Absinthe.Execution.Arguments.ListTest do
  # List arguments: scalars, custom scalars, input objects, nested lists,
  # single-value coercion, and nested error reporting.
  use Absinthe.Case, async: true

  @schema Absinthe.Fixtures.ArgumentsSchema

  @graphql """
  query ($contacts: [ContactInput]) {
    contacts(contacts: $contacts)
  }
  """
  test "when missing for a non-null argument, should raise an error" do
    msg = ~s(In argument "contacts": Expected type "[ContactInput]!", found null.)
    assert_error_message(msg, run(@graphql, @schema))
  end

  @graphql """
  query ($numbers: [Int!]!) {
    numbers(numbers: $numbers)
  }
  """
  test "using variables, works with basic scalars" do
    assert_data(%{"numbers" => [1, 2]}, run(@graphql, @schema, variables: %{"numbers" => [1, 2]}))
  end

  @graphql """
  query ($names: [Name!]!) {
    names(names: $names)
  }
  """
  test "works with custom scalars" do
    assert_data(
      %{"names" => ["Joe", "bob"]},
      run(@graphql, @schema, variables: %{"names" => ["Joe", "bob"]})
    )
  end

  @graphql """
  query ($contacts: [ContactInput]) {
    contacts(contacts: $contacts)
  }
  """
  test "using variables, works with input objects" do
    assert_data(
      %{"contacts" => ["[email protected]", "[email protected]"]},
      run(
        @graphql,
        @schema,
        variables: %{
          "contacts" => [
            %{"email" => "[email protected]"},
            %{"email" => "[email protected]"}
          ]
        }
      )
    )
  end

  @graphql """
  query ($contact: ContactInput) {
    contacts(contacts: [$contact, $contact])
  }
  """
  test "with inner variables" do
    assert_data(
      %{"contacts" => ["[email protected]", "[email protected]"]},
      run(@graphql, @schema, variables: %{"contact" => %{"email" => "[email protected]"}})
    )
  end

  @graphql """
  query ($contact: ContactInput) {
    contacts(contacts: [$contact, $contact])
  }
  """
  test "with inner variables when no variables are given" do
    # unbound nullable inner variables are dropped, leaving an empty list
    assert_data(%{"contacts" => []}, run(@graphql, @schema, variables: %{}))
  end

  @graphql """
  query {
    names(names: ["Joe", "bob"])
  }
  """
  test "custom scalars literals can be included" do
    assert_data(%{"names" => ["Joe", "bob"]}, run(@graphql, @schema))
  end

  @graphql """
  query {
    numbers(numbers: [1, 2])
  }
  """
  test "using literals, works with basic scalars" do
    assert_data(%{"numbers" => [1, 2]}, run(@graphql, @schema))
  end

  @graphql """
  query {
    listOfLists(items: [["foo"], ["bar", "baz"]])
  }
  """
  test "works with nested lists" do
    assert_data(%{"listOfLists" => [["foo"], ["bar", "baz"]]}, run(@graphql, @schema))
  end

  @graphql """
  query {
    numbers(numbers: 1)
  }
  """
  test "it will coerce a non list item if it's of the right type" do
    # per https://facebook.github.io/graphql/#sec-Lists
    assert_data(%{"numbers" => [1]}, run(@graphql, @schema))
  end

  @graphql """
  query {
    contacts(contacts: [{email: "[email protected]"}, {email: "[email protected]"}])
  }
  """
  test "using literals, works with input objects" do
    assert_data(%{"contacts" => ["[email protected]", "[email protected]"]}, run(@graphql, @schema))
  end

  @graphql """
  query {
    contacts(contacts: [{email: "[email protected]"}, {foo: "[email protected]"}])
  }
  """
  test "returns deeply nested errors" do
    # errors identify both the offending element (#2) and its inner fields
    assert_error_message_lines(
      [
        ~s(Argument "contacts" has invalid value [{email: "[email protected]"}, {foo: "[email protected]"}].),
        ~s(In element #2: Expected type "ContactInput", found {foo: "[email protected]"}.),
        ~s(In field "email": Expected type "String!", found null.),
        ~s(In field "foo": Unknown field.)
      ],
      run(@graphql, @schema)
    )
  end
end
defmodule Absinthe.Execution.Arguments.EnumTest do
  # Enum arguments: valid literal and variable values, nil handling, and
  # the error for invalid values.
  # NOTE(review): the first and fourth tests here are duplicates (same
  # document, same assertion) — a candidate for consolidation.
  use Absinthe.Case, async: true

  @schema Absinthe.Fixtures.ArgumentsSchema

  @graphql """
  query {
    contact(type: "bagel")
  }
  """
  test "for invalid values, returns an error with" do
    assert_error_message(~s(Argument "type" has invalid value "bagel".), run(@graphql, @schema))
  end

  @graphql """
  query ($type: ContactType) {
    contact(type: $type)
  }
  """
  test "should pass nil as an argument to the resolver for enum types" do
    assert_data(%{"contact" => nil}, run(@graphql, @schema, variables: %{"type" => nil}))
  end

  @graphql """
  query {
    contact(type: Email)
  }
  """
  test "should work with valid values" do
    assert_data(%{"contact" => "Email"}, run(@graphql, @schema))
  end

  @graphql """
  query {
    contact(type: "bagel")
  }
  """
  test "should return an error with invalid values" do
    assert_error_message(~s(Argument "type" has invalid value "bagel".), run(@graphql, @schema))
  end

  @graphql """
  query ($type: ContactType){
    contact(type: $type)
  }
  """
  test "as variable, should work with valid values" do
    assert_data(%{"contact" => "Email"}, run(@graphql, @schema, variables: %{"type" => "Email"}))
  end
end
defmodule Absinthe.Execution.Arguments.ScalarTest do
  # Custom scalar arguments: literals reach the resolver unchanged, both
  # for nullable and non-null declarations.
  use Absinthe.Case, async: true

  @schema Absinthe.Fixtures.ArgumentsSchema

  test "works when specified as non null" do
    document = """
    query {
      requiredThing(name: "bob")
    }
    """

    result = run(document, @schema)
    assert_data(%{"requiredThing" => "bob"}, result)
  end

  test "works when passed to resolution" do
    document = """
    query {
      something(name: "bob")
    }
    """

    result = run(document, @schema)
    assert_data(%{"something" => "bob"}, result)
  end
end
defmodule Absinthe.PipelineTest do
  # Tests Absinthe.Pipeline: running phase lists (with and without
  # options), and the pipeline-manipulation helpers (before, insert_before,
  # upto, without, replace).
  use Absinthe.Case, async: true

  alias Absinthe.{Blueprint, Pipeline, Phase}

  defmodule Schema do
    use Absinthe.Schema

    query do
      # Query type must exist
    end
  end

  describe ".run an operation" do
    @query """
    { foo { bar } }
    """

    test "can create a blueprint" do
      # run only through the Blueprint phase, not the full document pipeline
      pipeline =
        Pipeline.for_document(Schema)
        |> Pipeline.upto(Phase.Blueprint)

      assert {:ok, %Blueprint{}, [Phase.Blueprint, Phase.Parse]} = Pipeline.run(@query, pipeline)
    end
  end

  describe ".run an idl" do
    @query """
    type Person {
      name: String!
    }
    """

    @tag :pending_schema
    test "can create a blueprint without a prototype schema" do
      assert {:ok, %Blueprint{}, _} = Pipeline.run(@query, Pipeline.for_schema(nil))
    end

    @tag :pending_schema
    test "can create a blueprint with a prototype schema" do
      assert {:ok, %Blueprint{}, _} = Pipeline.run(@query, Pipeline.for_schema(Schema))
    end
  end

  # Toy phases used to exercise Pipeline.run/2 result threading.
  defmodule Phase1 do
    use Phase

    def run(input, _) do
      {:ok, String.reverse(input)}
    end
  end

  defmodule Phase2 do
    use Phase

    # Repeats the input `times` times, joined by "."; options arrive as a
    # keyword list and are normalized to a map.
    def run(input, options) do
      do_run(input, Map.new(options))
    end

    def do_run(input, %{times: times}) do
      result =
        1..times
        |> Enum.map(fn _ -> input end)
        |> Enum.join(".")

      {:ok, result}
    end
  end

  defmodule Phase3 do
    use Phase

    def run(input, options) do
      do_run(input, Enum.into(options, %{}))
    end

    defp do_run(input, %{reverse: true}) do
      {:ok, String.reverse(input)}
    end

    defp do_run(input, %{reverse: false}) do
      {:ok, input}
    end
  end

  describe ".run with options" do
    test "should work" do
      # completed phases are returned most-recent-first
      assert {:ok, "oof.oof.oof", [Phase3, Phase2, Phase1]} ==
               Pipeline.run("foo", [Phase1, {Phase2, times: 3}, {Phase3, reverse: false}])

      assert {:ok, "foo.foo.foo", [Phase3, Phase2, Phase1]} ==
               Pipeline.run("foo", [Phase1, {Phase2, times: 3}, {Phase3, reverse: true}])
    end
  end

  # A phase that violates the contract by returning a bare value.
  defmodule BadPhase do
    use Phase

    def run(input, _) do
      input
    end
  end

  describe ".run with a bad phase result" do
    test "should return a nice error object" do
      assert {:error, "Last phase did not return a valid result tuple.", [BadPhase]} ==
               Pipeline.run("foo", [BadPhase])
    end
  end

  # Pipeline fixture for the manipulation helpers below; bare module atoms
  # (A, B, ...) plus one {phase, opts} entry.
  @pipeline [A, B, C, D, {E, [name: "e"]}, F]

  describe ".before" do
    test "raises an exception if one can't be found" do
      assert_raise RuntimeError, fn -> Pipeline.before([], Anything) end
    end

    test "returns the phases before" do
      assert [] == Pipeline.before(@pipeline, A)
      assert [A, B, C] == Pipeline.before(@pipeline, D)
      assert [A, B, C, D] == Pipeline.before(@pipeline, E)
    end
  end

  describe ".insert_before" do
    test "raises an exception if one can't be found" do
      assert_raise RuntimeError, fn -> Pipeline.insert_before([], Anything, X) end
    end

    test "inserts the phase before" do
      assert [X, A, B, C, D, {E, [name: "e"]}, F] == Pipeline.insert_before(@pipeline, A, X)
      assert [A, B, C, D, X, {E, [name: "e"]}, F] == Pipeline.insert_before(@pipeline, E, X)
    end
  end

  describe ".upto" do
    test "raises an exception if one can't be found" do
      assert_raise RuntimeError, fn -> Pipeline.upto([], Anything) end
    end

    test "returns the phases upto the match" do
      assert [A, B, C] == Pipeline.upto(@pipeline, C)
      assert [A, B, C, D, {E, [name: "e"]}] == Pipeline.upto(@pipeline, E)
    end

    test "returns the pipeline without specified phase" do
      assert [A, B, D, {E, [name: "e"]}, F] == Pipeline.without(@pipeline, C)
      assert [A, B, C, D, F] == Pipeline.without(@pipeline, E)
    end
  end

  describe ".replace" do
    test "when not found, returns the pipeline unchanged" do
      assert @pipeline == Pipeline.replace(@pipeline, X, ABC)
    end

    test "when found, when the target has options and no replacement options are given, replaces the phase but reuses the options" do
      assert [A, B, C, D, {X, [name: "e"]}, F] == Pipeline.replace(@pipeline, E, X)
    end

    test "when found, when the target has options and replacement options are given, replaces the phase and uses the new options" do
      assert [A, B, C, D, {X, [name: "Custom"]}, F] ==
               Pipeline.replace(@pipeline, E, {X, [name: "Custom"]})

      assert [A, B, C, D, {X, []}, F] == Pipeline.replace(@pipeline, E, {X, []})
    end

    test "when found, when the target has no options, simply replaces the phase" do
      assert [A, B, C, X, {E, [name: "e"]}, F] == Pipeline.replace(@pipeline, D, X)

      assert [A, B, C, {X, [name: "Custom Opt"]}, {E, [name: "e"]}, F] ==
               Pipeline.replace(@pipeline, D, {X, [name: "Custom Opt"]})
    end
  end
end
defmodule Absinthe.Blueprint.TypeReferenceTest do
  # TypeReference.unwrap/1 strips List/NonNull wrappers (to any depth) and
  # leaves a bare Name node untouched.
  use Absinthe.Case, async: true

  alias Absinthe.Blueprint.TypeReference

  # Fresh Name node used as the innermost type in every test.
  defp name_node, do: %TypeReference.Name{name: "Foo"}

  describe ".unwrap of Name" do
    test "is left intact" do
      node = name_node()
      assert TypeReference.unwrap(node) == node
    end
  end

  describe ".unwrap of List" do
    test "extracts the inner name" do
      inner = name_node()
      wrapped = %TypeReference.List{of_type: inner}
      assert TypeReference.unwrap(wrapped) == inner
    end

    test "extracts the inner name, even when multiple deep" do
      inner = name_node()
      doubly_wrapped = %TypeReference.List{of_type: %TypeReference.List{of_type: inner}}
      assert TypeReference.unwrap(doubly_wrapped) == inner
    end
  end

  describe ".unwrap of NonNull" do
    test "extracts the inner name" do
      inner = name_node()
      wrapped = %TypeReference.NonNull{of_type: inner}
      assert TypeReference.unwrap(wrapped) == inner
    end

    test "extracts the inner name, even when multiple deep" do
      inner = name_node()
      wrapped = %TypeReference.List{of_type: %TypeReference.NonNull{of_type: inner}}
      assert TypeReference.unwrap(wrapped) == inner
    end
  end
end
# NOTE(review): this span is several Mix config files concatenated together
# (the source is a "concatenated ball" of a project); each `use Mix.Config`
# below marks the start of what was originally a separate config file.
use Mix.Config
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# Presumably the dev environment config — verify against the original repo.
config :logger, level: :debug
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
#     config :absinthe, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:absinthe, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
import_config "#{Mix.env()}.exs"
use Mix.Config
config :logger, level: :info
defmodule Mix.Tasks.Absinthe.Schema.Json do
  require Logger
  use Mix.Task
  import Mix.Generator

  @shortdoc "Generate a schema.json file for an Absinthe schema"

  @default_filename "./schema.json"
  @default_codec_name "Poison"

  @moduledoc """
  Generate a schema.json file

  ## Usage

      absinthe.schema.json [FILENAME] [OPTIONS]

  ## Options

      --schema The schema. Default: As configured for `:absinthe` `:schema`
      --json-codec Sets JSON Codec. Default: #{@default_codec_name}
      --pretty Whether to pretty-print. Default: false

  ## Examples

  Write to default path `#{@default_filename}` using the `:schema` configured for
  the `:absinthe` application and the default `#{@default_codec_name}` JSON codec:

      $ mix absinthe.schema.json

  Write to default path `#{@default_filename}` using the `MySchema` schema and
  the default `#{@default_codec_name}` JSON codec.

      $ mix absinthe.schema.json --schema MySchema

  Write to path `/path/to/schema.json` using the `MySchema` schema, using the
  default `#{@default_codec_name}` JSON codec, and pretty-printing:

      $ mix absinthe.schema.json --schema MySchema --pretty /path/to/schema.json

  Write to default path `#{@default_filename}` using the `MySchema` schema and
  a custom JSON codec, `MyCodec`:

      $ mix absinthe.schema.json --schema MySchema --json-codec MyCodec
  """

  @introspection_graphql Path.join([:code.priv_dir(:absinthe), "graphql", "introspection.graphql"])

  # Declaring the switches is important: without them OptionParser treats a
  # bare `--pretty` flag as taking a value, so the documented invocation
  # `--pretty /path/to/schema.json` would swallow the output filename.
  @parse_opts [switches: [schema: :string, json_codec: :string, pretty: :boolean]]

  @doc """
  Runs the introspection query against the configured schema and writes the
  JSON-encoded result to FILENAME (default: `#{@default_filename}`).

  Raises if no schema is given/configured or if execution returns an error.
  """
  def run(argv) do
    Application.ensure_all_started(:absinthe)

    Mix.Task.run("loadpaths", argv)
    Mix.Project.compile(argv)

    {opts, args, _} = OptionParser.parse(argv, @parse_opts)

    schema = find_schema(opts)
    json_codec = find_json(opts)
    filename = List.first(args) || @default_filename

    {:ok, query} = File.read(@introspection_graphql)

    case Absinthe.run(query, schema) do
      {:ok, result} ->
        create_directory(Path.dirname(filename))
        content = json_codec.module.encode!(result, json_codec.opts)
        create_file(filename, content, force: true)

      {:error, error} ->
        raise error
    end
  end

  # Resolves the JSON codec into a `%{module: mod, opts: opts}` map. The
  # default (`Poison`) is already an atom; a `--json-codec` value arrives
  # from OptionParser as a string and must be converted to its (already
  # loaded) module — previously the string leaked through and crashed at
  # `json_codec.module.encode!/2`.
  defp find_json(opts) do
    case Keyword.get(opts, :json_codec, Poison) do
      module when is_atom(module) ->
        %{module: module, opts: codec_opts(module, opts)}

      module_name when is_binary(module_name) ->
        module = Module.safe_concat([module_name])
        %{module: module, opts: codec_opts(module, opts)}
    end
  end

  # Poison understands a :pretty option; other codecs get no options.
  defp codec_opts(Poison, opts) do
    [pretty: Keyword.get(opts, :pretty, false)]
  end

  defp codec_opts(_, _) do
    []
  end

  # Finds the schema module from --schema or the :absinthe app config;
  # safe_concat only succeeds for modules that already exist.
  defp find_schema(opts) do
    case Keyword.get(opts, :schema, Application.get_env(:absinthe, :schema)) do
      nil ->
        raise "No --schema given or :schema configured for the :absinthe application"

      value ->
        [value] |> Module.safe_concat()
    end
  end
end
defmodule Absinthe do
  @moduledoc """
  Documentation for the Absinthe package, a toolkit for building GraphQL
  APIs with Elixir.

  For usage information, see [the documentation](http://hexdocs.pm/absinthe), which
  includes guides, API information for important modules, and links to useful resources.
  """

  defmodule ExecutionError do
    @moduledoc """
    An error during execution.
    """
    defexception message: "execution failed"
  end

  defmodule AnalysisError do
    @moduledoc """
    An error during analysis.
    """
    defexception message: "analysis failed"
  end

  @type result_selection_t :: %{
          String.t() =>
            nil
            | integer
            | float
            | boolean
            | binary
            | atom
            | [result_selection_t]
        }

  @type result_error_t ::
          %{message: String.t()}
          | %{message: String.t(), locations: [%{line: pos_integer, column: integer}]}

  @type result_t ::
          %{data: nil | result_selection_t}
          | %{data: nil | result_selection_t, errors: [result_error_t]}
          | %{errors: [result_error_t]}

  @doc """
  Evaluates a query document against a schema, with options.

  ## Options

  * `:adapter` - The name of the adapter to use. See the `Absinthe.Adapter`
    behaviour and the `Absinthe.Adapter.Passthrough` and
    `Absinthe.Adapter.LanguageConventions` modules that implement it.
    (`Absinthe.Adapter.LanguageConventions` is the default value for this option.)
  * `:operation_name` - If more than one operation is present in the provided
    query document, this must be provided to select which operation to execute.
  * `:variables` - A map of provided variable values to be used when filling in
    arguments in the provided query document.
  * `:context` -> A map of the execution context.
  * `:root_value` -> A root value to use as the source for toplevel fields.
  * `:analyze_complexity` -> Whether to analyze the complexity before
    executing an operation.
  * `:max_complexity` -> An integer (or `:infinity`) for the maximum allowed
    complexity for the operation being executed.

  ## Examples

  ```
  \"""
  query GetItemById($id: ID) {
    item(id: $id) {
      name
    }
  }
  \"""
  |> Absinthe.run(App.Schema, variables: %{"id" => params[:item_id]})
  ```

  See the `Absinthe` module documentation for more examples.
  """
  @type run_opts :: [
          context: %{},
          adapter: Absinthe.Adapter.t(),
          root_value: term,
          operation_name: String.t(),
          analyze_complexity: boolean,
          max_complexity: non_neg_integer | :infinity
        ]

  @type run_result :: {:ok, result_t} | {:error, String.t()}

  @spec run(
          binary | Absinthe.Language.Source.t() | Absinthe.Language.Document.t(),
          Absinthe.Schema.t(),
          run_opts
        ) :: run_result
  def run(document, schema, options \\ []) do
    # Build the standard document pipeline for this schema, then run the
    # document through it, reducing the outcome to an {:ok, _} / {:error, _}
    # tuple for callers.
    pipeline = Absinthe.Pipeline.for_document(schema, options)

    case Absinthe.Pipeline.run(document, pipeline) do
      {:ok, %{result: result}, _phases} -> {:ok, result}
      {:error, msg, _phases} -> {:error, msg}
    end
  end

  @doc """
  Evaluates a query document against a schema, without options.

  Raises an `Absinthe.ExecutionError` if the document could not be evaluated.

  ## Options

  See `run/3` for the available options.
  """
  @spec run!(
          binary | Absinthe.Language.Source.t() | Absinthe.Language.Document.t(),
          Absinthe.Schema.t(),
          Keyword.t()
        ) :: result_t | no_return
  def run!(input, schema, options \\ []) do
    input
    |> run(schema, options)
    |> case do
      {:ok, result} -> result
      {:error, err} -> raise ExecutionError, message: err
    end
  end
end
defmodule Absinthe.Adapter do
  @moduledoc """
  Absinthe supports an adapter mechanism that allows developers to define their
  schema using one code convention (eg, `snake_cased` fields and arguments), but
  accept query documents and return results (including names in errors) in
  another (eg, `camelCase`).

  Adapters aren't a part of GraphQL, but a utility that Absinthe adds so that
  both client and server can use conventions most natural to them.

  Absinthe ships with two adapters:

  * `Absinthe.Adapter.LanguageConventions`, which expects schemas to be defined
    in `snake_case` (the standard Elixir convention), translating to/from `camelCase`
    for incoming query documents and outgoing results. (This is the default as of v0.3.)
  * `Absinthe.Adapter.Underscore`, which is similar to the `LanguageConventions`
    adapter but converts all incoming identifiers to underscores and does not
    modify outgoing identifiers (since those are already expected to be
    underscores). Unlike `Absinthe.Adapter.Passthrough` this does not break
    introspection.
  * `Absinthe.Adapter.Passthrough`, which is a no-op adapter and makes no
    modifications. (Note at the current time this does not support introspection
    if you're using camelized conventions).

  To set an adapter, you pass a configuration option at runtime:

  For `Absinthe.run/3`:

  ```
  Absinthe.run(
    query,
    MyApp.Schema,
    adapter: YourApp.Adapter.TheAdapterName
  )
  ```

  For `Absinthe.Plug`:

  ```
  forward "/api",
    to: Absinthe.Plug,
    init_opts: [schema: MyAppWeb.Schema, adapter: YourApp.Adapter.TheAdapterName]
  ```

  For GraphiQL:

  ```
  forward "/graphiql",
    to: Absinthe.Plug.GraphiQL,
    init_opts: [schema: MyAppWeb.Schema, adapter: YourApp.Adapter.TheAdapterName]
  ```

  Check `Absinthe.Plug` for full documentation on using the Plugs.

  Notably, this means you're able to switch adapters on case-by-case basis.
  In a Phoenix application, this means you could even support using different
  adapters for different clients.

  A custom adapter module must merely implement the `Absinthe.Adapter` protocol,
  in many cases with `use Absinthe.Adapter` and only overriding the desired
  functions.

  ## Writing Your Own

  All you may need to implement in your adapter is `to_internal_name/2` and
  `to_external_name/2`.

  Check out `Absinthe.Adapter.LanguageConventions` for a good example.

  Note that types that are defined external to your application (including
  the introspection types) may not be compatible if you're using a different
  adapter.
  """

  @type t :: module

  defmacro __using__(_) do
    # Inject pass-through defaults for both callbacks; adapters override
    # whichever direction(s) they care about.
    quote do
      @behaviour unquote(__MODULE__)

      def to_internal_name(external_name, _role), do: external_name

      def to_external_name(internal_name, _role), do: internal_name

      defoverridable to_internal_name: 2, to_external_name: 2
    end
  end

  @typedoc "The lexical role of a name within the document/schema."
  @type role_t :: :operation | :field | :argument | :result | :type | :directive

  @doc """
  Convert a name from an external name to an internal name.

  ## Examples

  Prefix all names with their role, just for fun!

  ```
  def to_internal_name(external_name, role) do
    role_name = role |> to_string
    role_name <> "_" <> external_name
  end
  ```
  """
  @callback to_internal_name(binary, role_t) :: binary

  @doc """
  Convert a name from an internal name to an external name.

  ## Examples

  Remove the role-prefix (the inverse of what we did in `to_internal_name/2` above):

  ```
  def to_external_name(internal_name, role) do
    internal_name
    |> String.replace(~r/^\#{role}_/, "")
  end
  ```
  """
  @callback to_external_name(binary, role_t) :: binary
end
defmodule Absinthe.Traversal do
  # Graph traversal utilities for dealing with ASTs and schemas using the
  # `Absinthe.Traversal.Node` protocol.
  #
  # Note this really only exists to handle some Schema rules stuff and is
  # generally considered legacy code. See `Absinthe.Blueprint.Transform` for
  # information on how to walk blueprint trees.
  @moduledoc false

  alias __MODULE__
  alias Absinthe.Traversal.Node

  @type t :: %__MODULE__{context: any, seen: [Node.t()], path: [Node.t()]}
  defstruct context: nil, seen: [], path: []

  # Instructions defining behavior during traversal
  # * `{:ok, value, traversal}`: The value of the node is `value`, and traversal
  #   should continue to children (using `traversal`)
  # * `{:prune, value, traversal}`: The value of the node is `value` and
  #   traversal should NOT continue to children, but to siblings (using
  #   `traversal`)
  # * `{:error, message}`: Bad stuff happened, explained by `message`
  @type instruction_t :: {:ok, any, t} | {:prune, any, t} | {:error, any}

  # Traverse, reducing nodes using a given function to evaluate their value.
  @doc false
  @spec reduce(Node.t(), any, acc, (Node.t(), t, acc -> instruction_t)) :: acc when acc: var
  def reduce(node, context, initial_value, node_evaluator) do
    {result, _traversal} =
      do_reduce(node, %Traversal{context: context}, initial_value, node_evaluator)

    result
  end

  # Reduce using a traversal struct. Nodes already visited are skipped,
  # returning the accumulator untouched.
  #
  # NOTE: despite being documented in `instruction_t`, `{:error, _}`
  # instructions are not matched here; an evaluator returning one will raise
  # a CaseClauseError.
  @spec do_reduce(Node.t(), t, acc, (Node.t(), t, acc -> instruction_t)) :: {acc, t} when acc: var
  defp do_reduce(node, traversal, initial_value, node_evaluator) do
    if seen?(traversal, node) do
      {initial_value, traversal}
    else
      case node_evaluator.(node, traversal, initial_value) do
        {:ok, value, next_traversal} ->
          reduce_children(node, next_traversal |> put_seen(node), value, node_evaluator)

        {:prune, value, next_traversal} ->
          {value, next_traversal |> put_seen(node)}
      end
    end
  end

  # Traverse a node's children, threading both the accumulated value and the
  # traversal state through each child in turn.
  # (Fixes the misspelled `node_evalator` parameter name.)
  @spec reduce_children(Node.t(), t, acc, (Node.t(), t, acc -> instruction_t)) :: {acc, t}
        when acc: var
  defp reduce_children(node, traversal, initial, node_evaluator) do
    Enum.reduce(Node.children(node, traversal), {initial, traversal}, fn child,
                                                                         {this_value,
                                                                          this_traversal} ->
      do_reduce(child, this_traversal, this_value, node_evaluator)
    end)
  end

  # Whether the node has already been visited during this traversal.
  @spec seen?(t, Node.t()) :: boolean
  defp seen?(traversal, node), do: traversal.seen |> Enum.member?(node)

  # Record a node as visited.
  @spec put_seen(t, Node.t()) :: t
  defp put_seen(traversal, node) do
    %{traversal | seen: [node | traversal.seen]}
  end
end
defmodule Absinthe.Blueprint do
  @moduledoc """
  Represents the graphql document to be executed.

  Please see the code itself for more information on individual blueprint sub
  modules.
  """

  alias __MODULE__

  defstruct operations: [],
            directives: [],
            fragments: [],
            name: nil,
            schema_definitions: [],
            schema: nil,
            adapter: nil,
            # Added by phases
            flags: %{},
            errors: [],
            input: nil,
            execution: %Blueprint.Execution{},
            result: %{}

  @type t :: %__MODULE__{
          operations: [Blueprint.Document.Operation.t()],
          schema_definitions: [Blueprint.Schema.t()],
          directives: [Blueprint.Schema.DirectiveDefinition.t()],
          name: nil | String.t(),
          fragments: [Blueprint.Document.Fragment.Named.t()],
          schema: nil | Absinthe.Schema.t(),
          adapter: nil | Absinthe.Adapter.t(),
          # Added by phases
          errors: [Absinthe.Phase.Error.t()],
          flags: flags_t,
          execution: Blueprint.Execution.t(),
          result: result_t
        }

  @type result_t :: %{
          optional(:data) => term,
          optional(:errors) => [term],
          optional(:extensions) => term
        }

  @type node_t ::
          Blueprint.t()
          | Blueprint.Directive.t()
          | Blueprint.Document.t()
          | Blueprint.Schema.t()
          | Blueprint.Input.t()
          | Blueprint.TypeReference.t()

  @type use_t ::
          Blueprint.Document.Fragment.Named.Use.t()
          | Blueprint.Input.Variable.Use.t()

  @type flags_t :: %{atom => module}

  defdelegate prewalk(blueprint, fun), to: Absinthe.Blueprint.Transform
  defdelegate prewalk(blueprint, acc, fun), to: Absinthe.Blueprint.Transform
  defdelegate postwalk(blueprint, fun), to: Absinthe.Blueprint.Transform
  defdelegate postwalk(blueprint, acc, fun), to: Absinthe.Blueprint.Transform

  @doc """
  Return the first node in the blueprint for which `fun` returns truthy,
  or `nil` when no node matches.
  """
  def find(blueprint, fun) do
    blueprint
    |> Blueprint.prewalk(nil, fn
      node, nil ->
        # Nothing found so far; test this node.
        if fun.(node), do: {node, node}, else: {node, nil}

      node, found ->
        # Already found a match; just carry it through.
        {node, found}
    end)
    |> elem(1)
  end

  @doc false
  # This is largely a debugging tool which replaces `schema_node` struct values
  # with just the type identifier, rendering the blueprint tree much easier to read
  def __compress__(blueprint) do
    prewalk(blueprint, fn
      %{schema_node: %{identifier: id}} = node -> %{node | schema_node: id}
      other -> other
    end)
  end

  @doc "Look up a named fragment on the blueprint."
  @spec fragment(t, String.t()) :: nil | Blueprint.Document.Fragment.Named.t()
  def fragment(blueprint, name) do
    Enum.find(blueprint.fragments, fn fragment -> fragment.name == name end)
  end

  @doc """
  Add a flag to a node.
  """
  @spec put_flag(node_t, atom, module) :: node_t
  def put_flag(node, flag, mod) do
    update_in(node.flags, fn flags -> Map.put(flags, flag, mod) end)
  end

  @doc """
  Determine whether a flag has been set on a node.
  """
  @spec flagged?(node_t, atom) :: boolean
  def flagged?(node, flag) do
    node.flags |> Map.has_key?(flag)
  end

  @doc """
  Get the currently selected operation.
  """
  @spec current_operation(t) :: nil | Blueprint.Document.Operation.t()
  def current_operation(blueprint) do
    Enum.find(blueprint.operations, &match?(%{current: true}, &1))
  end

  @doc """
  Update the current operation.
  """
  @spec update_current(t, (Blueprint.Document.Operation.t() -> Blueprint.Document.Operation.t())) ::
          t
  def update_current(blueprint, change) do
    ops =
      for op <- blueprint.operations do
        case op do
          %{current: true} -> change.(op)
          other -> other
        end
      end

    %{blueprint | operations: ops}
  end
end
defmodule Absinthe.Middleware.Batch do
  @moduledoc """
  Batch the resolution of multiple fields.

  ## Motivation

  Consider the following graphql query:

  ```
  {
    posts {
      author {
        name
      }
    }
  }
  ```

  `posts` returns a list of `post` objects, which has an associated `author` field.
  If the `author` field makes a call to the database we have the classic N + 1 problem.
  What we want is a way to load all authors for all posts in one database request.

  This plugin provides this, without any eager loading at the parent level. That is,
  the code for the `posts` field does not need to do anything to facilitate the
  efficient loading of its children.

  ## Example Usage

  The API for this plugin is a little on the verbose side because it is not specific
  to any particular batching mechanism. That is, this API is just as useful for an Ecto
  based DB as it is for talking to S3 or the File System. Thus we anticipate people
  (including ourselves) will be creating additional functions more tailored to each
  of those specific use cases.

  Here is an example using the `Absinthe.Resolution.Helpers.batch/3` helper.

  ```elixir
  object :post do
    field :name, :string
    field :author, :user do
      resolve fn post, _, _ ->
        batch({__MODULE__, :users_by_id}, post.author_id, fn batch_results ->
          {:ok, Map.get(batch_results, post.author_id)}
        end)
      end
    end
  end

  def users_by_id(_, user_ids) do
    users = Repo.all from u in User, where: u.id in ^user_ids
    Map.new(users, fn user -> {user.id, user} end)
  end
  ```

  Let's look at this piece by piece:
  - `{__MODULE__, :users_by_id}`: is the batching function which will be used. It must
  be a 2 arity function. For details see the `batch_fun` typedoc.
  - `post.author_id`: This is the information to be aggregated. The aggregated values
  are the second argument to the batching function.
  - `fn batch_results`: This function takes the results from the batching function.
  it should return one of the resolution function values.

  Clearly some of this could be derived for ecto functions. Check out the Absinthe.Ecto
  library for something that provides this:

  ```elixir
  field :author, :user, resolve: assoc(:author)
  ```

  Such a function could be easily built upon the API of this module.
  """

  @behaviour Absinthe.Middleware
  @behaviour Absinthe.Plugin

  @typedoc """
  The function to be called with the aggregate batch information.

  It comes in both a 2 tuple and 3 tuple form. The first two elements are the module
  and function name. The third element is an arbitrary parameter that is passed
  as the first argument to the batch function.

  For example, one could parameterize the `users_by_id` function from the moduledoc
  to make it more generic. Instead of doing `{__MODULE__, :users_by_id}` you could do
  `{__MODULE__, :by_id, User}`. Then the function would be:

  ```elixir
  def by_id(model, ids) do
    model
    |> where([m], m.id in ^ids)
    |> Repo.all()
    |> Map.new(&{&1.id, &1})
  end
  ```

  It could also be used to set options unique to the execution of a particular
  batching function.
  """
  @type batch_fun :: {module, atom} | {module, atom, term}

  # Function applied to a batch result for a single field.
  @type post_batch_fun :: (term -> Absinthe.Type.Field.result())

  # Plugin callback: before each resolution pass, ensure the accumulator has
  # this module's slot and reset :input to an empty list, preserving any
  # :output produced by a previous pass (read by the suspended `call/2` clause).
  def before_resolution(exec) do
    case exec.acc do
      %{__MODULE__ => _} ->
        put_in(exec.acc[__MODULE__][:input], [])

      _ ->
        put_in(exec.acc[__MODULE__], %{input: [], output: %{}})
    end
  end

  # First pass for a batched field: prepend {batch key, data} to the shared
  # :input list, suspend the field, and queue this module again (with just the
  # batch key and post-batch fun) so the second pass can read the batch output.
  def call(%{state: :unresolved} = res, {batch_key, field_data, post_batch_fun, batch_opts}) do
    acc = res.acc

    acc =
      update_in(acc[__MODULE__][:input], fn
        nil -> [{{batch_key, batch_opts}, field_data}]
        data -> [{{batch_key, batch_opts}, field_data} | data]
      end)

    %{
      res
      | state: :suspended,
        middleware: [{__MODULE__, {batch_key, post_batch_fun}} | res.middleware],
        acc: acc
    }
  end

  # Second pass: after_resolution/1 has run the batches; look up this field's
  # batch output (Map.fetch! — a missing key is a bug) and hand it to the
  # post-batch function. `put_result` sets the resolution state accordingly.
  def call(%{state: :suspended} = res, {batch_key, post_batch_fun}) do
    batch_data_for_fun =
      res.acc
      |> Map.fetch!(__MODULE__)
      |> Map.fetch!(:output)
      |> Map.fetch!(batch_key)

    res
    |> Absinthe.Resolution.put_result(post_batch_fun.(batch_data_for_fun))
  end

  # Plugin callback: run all collected batch functions and store their results
  # under :output, keyed by batch key.
  def after_resolution(exec) do
    output = do_batching(exec.acc[__MODULE__][:input])
    put_in(exec.acc[__MODULE__][:output], output)
  end

  # Group the aggregated data by {batch_fun, batch_opts}, run each batch
  # function in its own Task, then await them all (default :timeout per batch
  # is 5000 ms, configurable via batch_opts).
  defp do_batching(input) do
    input
    |> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
    |> Enum.map(fn {{batch_fun, batch_opts}, batch_data} ->
      {batch_opts,
       Task.async(fn ->
         {batch_fun, call_batch_fun(batch_fun, batch_data)}
       end)}
    end)
    |> Map.new(fn {batch_opts, task} ->
      timeout = Keyword.get(batch_opts, :timeout, 5_000)
      Task.await(task, timeout)
    end)
  end

  # Normalize the 2-tuple form to the 3-tuple form with an empty config.
  defp call_batch_fun({module, fun}, batch_data) do
    call_batch_fun({module, fun, []}, batch_data)
  end

  defp call_batch_fun({module, fun, config}, batch_data) do
    apply(module, fun, [config, batch_data])
  end

  # If the flag is set we need to do another resolution phase.
  # otherwise, we do not
  def pipeline(pipeline, exec) do
    case exec.acc[__MODULE__][:input] do
      [_ | _] ->
        [Absinthe.Phase.Document.Execution.Resolution | pipeline]

      _ ->
        pipeline
    end
  end
end
defmodule Absinthe.Middleware.MapGet do
  @moduledoc """
  This is the default middleware. It assumes the object it receives is a map
  and uses `Map.get/2` to get the value for this field. If this field is already
  marked as resolved, then this middleware does not touch it.

  If you want to replace this middleware you should use
  `Absinthe.Schema.replace_default/4`
  """

  @behaviour Absinthe.Middleware

  # Unresolved field: look the key up in the source map and mark it resolved.
  def call(%{state: :unresolved, source: source} = res, key) do
    value = Map.get(source, key)
    %{res | value: value, state: :resolved}
  end

  # Any other state: leave the resolution untouched.
  def call(res, _key), do: res
end
defmodule Absinthe.Middleware.Async do
  @moduledoc """
  This plugin enables asynchronous execution of a field.

  See also `Absinthe.Resolution.Helpers.async/1`

  # Example Usage:

  Using the `Absinthe.Resolution.Helpers.async/1` helper function:

  ```elixir
  field :time_consuming, :thing do
    resolve fn _, _, _ ->
      async(fn ->
        {:ok, long_time_consuming_function()}
      end)
    end
  end
  ```

  Using the bare plugin API

  ```elixir
  field :time_consuming, :thing do
    resolve fn _, _, _ ->
      task = Task.async(fn ->
        {:ok, long_time_consuming_function()}
      end)
      {:middleware, #{__MODULE__}, task}
    end
  end
  ```

  This module also serves as an example for how to build middleware that uses the
  resolution callbacks.

  See the source code and associated comments for further details.
  """

  @behaviour Absinthe.Middleware
  @behaviour Absinthe.Plugin

  # First pass: a resolver handed us {fun, opts}. Kick the work off in a
  # `Task`, flag the accumulator so the plugin knows another resolution pass
  # is required, suspend this field, and queue ourselves again so the second
  # pass can await the task with the given opts.
  def call(%{state: :unresolved} = res, {fun, opts}) do
    %{
      res
      | state: :suspended,
        middleware: [{__MODULE__, {Task.async(fun), opts}} | res.middleware],
        acc: Map.put(res.acc, __MODULE__, true)
    }
  end

  # Second pass: await the task started above (default timeout 30_000 ms) and
  # apply its result. `put_result` resolves ok/error tuples and re-suspends
  # middleware tuples.
  def call(%{state: :suspended} = res, {task, opts}) do
    timeout = opts[:timeout] || 30_000

    res
    |> Absinthe.Resolution.put_result(Task.await(task, timeout))
  end

  # Reset the flag before every pass; any async field will flip it back on.
  def before_resolution(exec) do
    %{exec | acc: Map.put(exec.acc, __MODULE__, false)}
  end

  # Nothing to do after resolution for this plugin, so we no-op.
  def after_resolution(exec), do: exec

  # Schedule another resolution phase only when an async field ran this pass.
  def pipeline(pipeline, exec) do
    if exec.acc[__MODULE__] == true do
      [Absinthe.Phase.Document.Execution.Resolution | pipeline]
    else
      pipeline
    end
  end
end
defmodule Absinthe.Middleware.PassParent do
  @moduledoc """
  Middleware that just passes the parent down to the children.

  This is the default resolver for subscription fields.
  """

  @behaviour Absinthe.Middleware

  # Resolve the field to its own source (parent) value, untouched.
  def call(%{source: parent} = res, _) do
    %{res | value: parent, state: :resolved}
  end
end
# Only defined when the optional `dataloader` dependency is available.
if Code.ensure_loaded?(Dataloader) do
  defmodule Absinthe.Middleware.Dataloader do
    @moduledoc """
    Middleware/plugin integrating `Dataloader` into field resolution.

    Fields whose loader has pending batches are suspended; the loader is run
    between resolution passes, and additional resolution phases are scheduled
    while batches remain pending.
    """

    @behaviour Absinthe.Middleware
    @behaviour Absinthe.Plugin

    # Plugin callback: before each resolution pass, run the loader (when one
    # is present in the context) so previously requested batches are loaded.
    def before_resolution(%{context: context} = exec) do
      context =
        with %{loader: loader} <- context do
          %{context | loader: Dataloader.run(loader)}
        end

      %{exec | context: context}
    end

    # No pending batches: the data is already loaded, resolve immediately.
    # Otherwise store the loader in the context, suspend the field, and queue
    # this module again so the next pass can read the loaded result.
    def call(%{state: :unresolved} = resolution, {loader, callback}) do
      if !Dataloader.pending_batches?(loader) do
        get_result(resolution, callback)
      else
        %{
          resolution
          | context: Map.put(resolution.context, :loader, loader),
            state: :suspended,
            middleware: [{__MODULE__, callback} | resolution.middleware]
        }
      end
    end

    # Second pass: the loader has run (see before_resolution/1); apply the
    # callback to produce the field's result.
    def call(%{state: :suspended} = resolution, callback) do
      get_result(resolution, callback)
    end

    # Invoke the user callback with the (run) loader and record its result.
    defp get_result(resolution, callback) do
      value = callback.(resolution.context.loader)
      Absinthe.Resolution.put_result(resolution, value)
    end

    # Nothing to do after a pass.
    def after_resolution(exec) do
      exec
    end

    # Schedule another resolution phase while the loader still has pending
    # batches.
    def pipeline(pipeline, exec) do
      with %{loader: loader} <- exec.context,
           true <- Dataloader.pending_batches?(loader) do
        [Absinthe.Phase.Document.Execution.Resolution | pipeline]
      else
        _ -> pipeline
      end
    end
  end
end
defmodule Absinthe.Pipeline.BatchResolver do
  alias Absinthe.Phase.Document.Execution

  require Logger

  @moduledoc false

  # Runs the resolution phase across a list of blueprints, sharing a single
  # accumulator and context between them so batching plugins can aggregate
  # work across all documents in the batch.

  def run([], _), do: []

  def run([bp | _] = blueprints, options) do
    schema = Keyword.fetch!(options, :schema)
    plugins = schema.plugins()

    # Merge acc/context from every blueprint's execution into shared maps.
    acc = init(blueprints, :acc)
    ctx = init(blueprints, :context)

    # This will serve as a generic cross document execution struct
    exec = %{
      bp.execution
      | acc: acc,
        context: ctx,
        fragments: %{},
        validation_errors: [],
        result: nil
    }

    # plugin_callbacks: false — plugin before/after callbacks are invoked
    # once per pass here in do_resolve/6, not per document by the phase.
    resolution_phase = {Execution.Resolution, [plugin_callbacks: false] ++ options}

    do_resolve(blueprints, [resolution_phase], exec, plugins, resolution_phase, options)
  end

  # Merge the given execution attribute (:acc or :context) across blueprints.
  defp init(blueprints, attr) do
    Enum.reduce(blueprints, %{}, &Map.merge(Map.fetch!(&1.execution, attr), &2))
  end

  # Run one resolution pass over every blueprint, bracketed by the plugins'
  # before/after callbacks, then recurse while any plugin requests a further
  # resolution phase (via Absinthe.Plugin.pipeline/2).
  defp do_resolve(blueprints, phases, exec, plugins, resolution_phase_template, options) do
    exec =
      Enum.reduce(plugins, exec, fn plugin, exec ->
        plugin.before_resolution(exec)
      end)

    abort_on_error? = Keyword.get(options, :abort_on_error, true)

    {blueprints, exec} = execute(blueprints, phases, abort_on_error?, [], exec)

    exec =
      Enum.reduce(plugins, exec, fn plugin, exec ->
        plugin.after_resolution(exec)
      end)

    plugins
    |> Absinthe.Plugin.pipeline(exec)
    |> case do
      [] ->
        blueprints

      pipeline ->
        # Re-insert our customized resolution phase in place of the default
        # one the plugins requested.
        pipeline =
          Absinthe.Pipeline.replace(pipeline, Execution.Resolution, resolution_phase_template)

        do_resolve(blueprints, pipeline, exec, plugins, resolution_phase_template, options)
    end
  end

  # Run the phases for each blueprint in turn, threading the shared acc and
  # context through. A failed document is recorded as `:error` in the results
  # (only reachable when abort_on_error? is false — see run_pipeline/3).
  defp execute([], _phases, _abort_on_error?, results, exec) do
    {:lists.reverse(results), exec}
  end

  defp execute([bp | rest], phases, abort_on_error?, results, exec) do
    bp
    |> update_exec(exec)
    |> run_pipeline(phases, abort_on_error?)
    |> case do
      {:ok, bp} ->
        # Carry this document's resulting acc/context into the next one.
        %{acc: acc, context: ctx} = bp.execution
        exec = %{exec | acc: acc, context: ctx}
        execute(rest, phases, abort_on_error?, [bp | results], exec)

      :error ->
        execute(rest, phases, abort_on_error?, [:error | results], exec)
    end
  end

  # abort_on_error?: true — let any exception bubble up to the caller.
  defp run_pipeline(bp, phases, _abort_on_error? = true) do
    {:ok, blueprint, _} = Absinthe.Pipeline.run(bp, phases)
    {:ok, blueprint}
  end

  # abort_on_error?: false — log the failure and mark this document :error.
  defp run_pipeline(bp, phases, _) do
    {:ok, blueprint, _} = Absinthe.Pipeline.run(bp, phases)
    {:ok, blueprint}
  rescue
    e ->
      pipeline_error(e, System.stacktrace())
      :error
  end

  # Copy the shared acc/context into a blueprint's execution before running it.
  defp update_exec(%{execution: execution} = bp, %{acc: acc, context: ctx}) do
    %{bp | execution: %{execution | acc: acc, context: ctx}}
  end

  # Log an exception (with stacktrace) raised while running a document.
  def pipeline_error(exception, trace) do
    message = Exception.message(exception)
    stacktrace = trace |> Exception.format_stacktrace()

    Logger.error("""
    #{message}
    #{stacktrace}
    """)
  end
end
defmodule Absinthe.Pipeline.ErrorResult do
  @moduledoc """
  A basic struct that wraps phase errors for
  reporting to the user.
  """

  alias Absinthe.Phase

  defstruct errors: []

  @type t :: %__MODULE__{
          errors: [Phase.Error.t()]
        }

  @doc "Generate a new ErrorResult for one or more phase errors"
  @spec new(Phase.Error.t() | [Phase.Error.t()]) :: t
  def new(errors) do
    # A single error is normalized into a one-element list.
    wrapped = List.wrap(errors)
    struct(__MODULE__, errors: wrapped)
  end
end
defmodule Absinthe.Complexity do
  @moduledoc """
  Extra metadata passed to aid complexity analysis functions, describing the
  current field's environment.
  """

  alias Absinthe.{Blueprint, Schema}

  # All four fields are required when constructing this struct.
  @enforce_keys [:context, :root_value, :schema, :definition]
  defstruct [:context, :root_value, :schema, :definition]

  @typedoc """
  - `:definition` - The Blueprint definition for this field.
  - `:context` - The context passed to `Absinthe.run`.
  - `:root_value` - The root value passed to `Absinthe.run`, if any.
  - `:schema` - The current schema.
  """
  @type t :: %__MODULE__{
          definition: Blueprint.node_t(),
          context: map,
          root_value: any,
          schema: Schema.t()
        }
end
defmodule Absinthe.Phase.Blueprint do
  @moduledoc false

  use Absinthe.Phase

  alias Absinthe.Blueprint

  # Converts the raw parsed input stored on the blueprint into the full
  # blueprint via `Blueprint.Draft.convert/2`.
  @spec run(any, Keyword.t()) :: {:ok, Blueprint.t()}
  def run(blueprint, _options \\ []) do
    {:ok, Blueprint.Draft.convert(blueprint.input, blueprint)}
  end
end
defmodule Absinthe.Phase.Error do
  # An error produced by a pipeline phase (the :phase field records which
  # phase module generated it).
  @moduledoc false

  # :message and :phase must always be provided.
  @enforce_keys [:message, :phase]
  defstruct [
    :message,
    :phase,
    locations: [],
    extra: %{},
    path: []
  ]

  # A source location: at minimum a line and column; other keys permitted.
  @type loc_t :: %{optional(any) => any, line: pos_integer, column: pos_integer}

  @type t :: %__MODULE__{
          message: String.t(),
          phase: module,
          locations: [loc_t],
          path: [],
          extra: map
        }
end
defmodule Absinthe.Phase.Document.Variables do
  @moduledoc false

  # Provided a set of variable values:
  #
  # - Set the `variables` field on the `Blueprint.Document.Operation.t` to the
  #   reconciled mapping of variable values, supporting defined default values.
  #
  # ## Examples
  #
  # Given a GraphQL document that looks like:
  #
  # ```
  # query Item($id: ID!, $text: String = "Another") {
  #   item(id: $id, category: "Things") {
  #     name
  #   }
  # }
  # ```
  #
  # And this phase configuration:
  #
  # ```
  # run(blueprint, %{"id" => "1234"})
  # ```
  #
  # - The operation's `variables` field would have an `"id"` value set to
  #   `%Blueprint.Input.StringValue{value: "1234"}`
  # - The operation's `variables` field would have an `"text"` value set to
  #   `%Blueprint.Input.StringValue{value: "Another"}`
  #
  # ```
  # run(blueprint, %{})
  # ```
  #
  # - The operation's `variables` field would have an `"id"` value set to
  #   `nil`
  # - The operation's `variables` field would have an `"text"` value set to
  #   `%Blueprint.Input.StringValue{value: "Another"}`
  #
  # Note that no validation occurs in this phase.

  use Absinthe.Phase
  alias Absinthe.Blueprint

  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, options \\ []) do
    provided = options[:variables] || %{}
    {:ok, update_operations(input, provided)}
  end

  # Rebuild every operation with its variable definitions resolved.
  def update_operations(input, variables) do
    %{input | operations: Enum.map(input.operations, &update_operation(&1, variables))}
  end

  # Resolve each variable definition to a concrete value, also collecting the
  # provided values into a name => value map stored on the operation.
  def update_operation(%{variable_definitions: definitions} = operation, variables) do
    {definitions, provided_values} =
      Enum.map_reduce(definitions, %{}, fn definition, acc ->
        value = calculate_value(definition, variables)

        {
          %{definition | provided_value: value},
          Map.put(acc, definition.name, value)
        }
      end)

    %{operation | variable_definitions: definitions, provided_values: provided_values}
  end

  # Use the caller-supplied value when present (parsed into blueprint input
  # nodes); otherwise fall back to the definition's default value.
  defp calculate_value(%{name: name} = definition, variables) do
    case variables do
      %{^name => value} ->
        value
        |> preparse_nil()
        |> Blueprint.Input.parse()

      _ ->
        definition.default_value
    end
  end

  # An explicitly-supplied nil is a GraphQL null literal, not an absent value.
  defp preparse_nil(nil), do: %Blueprint.Input.Null{}
  defp preparse_nil(value), do: value
end
defmodule Absinthe.Phase.Document.MissingLiterals do
  @moduledoc false

  # Fills out missing arguments and input object fields.
  #
  # Filling out means inserting a stubbed `Input.Argument` or `Input.Field` struct.
  #
  # Only those arguments which are non null and / or have a default value are filled
  # out.
  #
  # If an argument or input object field is non null and missing, it is marked invalid

  use Absinthe.Phase
  alias Absinthe.{Blueprint, Type}

  # Walk the blueprint, populating every node that carries arguments or
  # input-object fields with stubs for schema-defined entries the document
  # did not supply.
  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    node = Blueprint.prewalk(input, &populate_node(&1, input.adapter, input.schema))
    {:ok, node}
  end

  # No schema node: nothing to compare against, leave the node untouched.
  defp populate_node(%{schema_node: nil} = node, _adapter, _schema), do: node

  # Node carrying arguments (e.g. a field or directive): fill in any missing
  # arguments defined by the schema.
  defp populate_node(
         %{arguments: arguments, schema_node: %{args: schema_args}} = node,
         adapter,
         schema
       ) do
    arguments =
      fill_missing_nodes(
        Blueprint.Input.Argument,
        arguments,
        schema_args,
        node.source_location,
        adapter,
        schema
      )

    %{node | arguments: arguments}
  end

  # Input object literal: fill in any missing fields defined by the schema.
  defp populate_node(
         %Blueprint.Input.Object{fields: fields, schema_node: %{fields: schema_fields}} = node,
         adapter,
         schema
       ) do
    fields =
      fill_missing_nodes(
        Blueprint.Input.Field,
        fields,
        schema_fields,
        node.source_location,
        adapter,
        schema
      )

    %{node | fields: fields}
  end

  # Input object whose schema node is a wrapper exposing :of_type (e.g. a
  # NonNull wrapper): unwrap one level and retry the clauses above.
  defp populate_node(
         %Blueprint.Input.Object{schema_node: %{of_type: type}} = node,
         adapter,
         schema
       ) do
    %{node | schema_node: type}
    |> populate_node(adapter, schema)
  end

  # Anything else passes through unchanged.
  defp populate_node(node, _adapter, _schema), do: node

  # Build stub nodes of `type` (Input.Argument or Input.Field) for the
  # schema-defined arguments/fields absent from the document. Clause order in
  # the reduce matters: deprecation is checked before default values.
  defp fill_missing_nodes(type, arguments, schema_args, source_location, adapter, schema) do
    missing_schema_args = find_missing_schema_nodes(arguments, schema_args)

    missing_schema_args
    |> Map.values()
    |> Enum.reduce(arguments, fn
      # If it's deprecated without a default, ignore it
      %{deprecation: %{}, default_value: nil}, arguments ->
        arguments

      # If it has a default value, we want it.
      %{default_value: val} = schema_node, arguments when not is_nil(val) ->
        arg = build_node(type, schema_node, val, source_location, adapter, schema)
        [arg | arguments]

      # It isn't deprecated, it is null, and there's no default value. It's missing
      %{type: %Type.NonNull{}} = missing_mandatory_arg_schema_node, arguments ->
        # NOTE(review): flag_invalid/2 is presumably imported via
        # `use Absinthe.Phase` — confirm there.
        arg =
          type
          |> build_node(
            missing_mandatory_arg_schema_node,
            missing_mandatory_arg_schema_node.default_value,
            source_location,
            adapter,
            schema
          )
          |> flag_invalid(:missing)

        [arg | arguments]

      # No default value, and it's allowed to be null. Ignore it.
      _, arguments ->
        arguments
    end)
  end

  # Given the set of possible schema args, return only those not supplied in
  # the document argument / fields
  defp find_missing_schema_nodes(nodes, schema_nodes) do
    nodes
    |> Enum.filter(& &1.schema_node)
    |> Enum.reduce(schema_nodes, fn
      %{schema_node: %{identifier: id}}, acc ->
        Map.delete(acc, id)

      _, acc ->
        acc
    end)
  end

  # Build a single stubbed argument/field node carrying the schema default
  # (or nil) as its value; `Input.Generated` marks values synthesized by this
  # phase rather than written in the document.
  defp build_node(type, schema_node_arg, default, source_location, adapter, schema) do
    struct!(type, %{
      name: schema_node_arg.name |> build_name(adapter, type),
      input_value: %Blueprint.Input.Value{
        data: default,
        normalized:
          if(is_nil(default), do: nil, else: %Blueprint.Input.Generated{by: __MODULE__}),
        raw: nil,
        schema_node: Type.expand(schema_node_arg.type, schema)
      },
      schema_node: schema_node_arg,
      source_location: source_location
    })
  end

  # The external (client-facing) name depends on the node kind per the adapter.
  defp build_name(name, adapter, Blueprint.Input.Argument) do
    adapter.to_external_name(name, :argument)
  end

  defp build_name(name, adapter, Blueprint.Input.Field) do
    adapter.to_external_name(name, :field)
  end
end
defmodule Absinthe.Phase.Document.Complexity.Analysis do
  @moduledoc false

  # Analyses document complexity.
  #
  # Assigns an integer `complexity` to fields, fragments, and operations via
  # bottom-up (postwalk) traversals. A field with no custom analyzer costs
  # `@default_complexity` plus the sum of its children's complexities.

  alias Absinthe.{Blueprint, Phase, Complexity, Type}

  use Absinthe.Phase

  # Cost of a field whose schema node declares no custom complexity.
  @default_complexity 1

  @doc """
  Run complexity analysis.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, options \\ []) do
    # Analysis is opt-in: skip entirely unless :analyze_complexity is set.
    if Keyword.get(options, :analyze_complexity, false) do
      do_run(input, options)
    else
      {:ok, input}
    end
  end

  defp do_run(input, options) do
    info = info_boilerplate(input, options)
    # Fragments are analyzed first so spreads can look up their totals.
    fragments = process_fragments(input, info)
    fun = &handle_node(&1, info, fragments)
    {:ok, Blueprint.postwalk(input, fun)}
  end

  # Computes complexity for each named fragment, accumulating results in a
  # map keyed by fragment name for spread lookup.
  # NOTE(review): a fragment that spreads another appears to require the
  # target to occur earlier in `input.fragments` — otherwise Map.fetch! in
  # handle_node/3 raises. Confirm ordering is guaranteed upstream.
  defp process_fragments(input, info) do
    Enum.reduce(input.fragments, %{}, fn fragment, processed ->
      fun = &handle_node(&1, info, processed)
      fragment = Blueprint.postwalk(fragment, fun)
      Map.put(processed, fragment.name, fragment)
    end)
  end

  # A spread inherits the complexity of the fragment it references.
  def handle_node(%Blueprint.Document.Fragment.Spread{name: name} = node, _info, fragments) do
    fragment = Map.fetch!(fragments, name)
    %{node | complexity: fragment.complexity}
  end

  # A named fragment's complexity is the sum of its selections.
  def handle_node(
        %Blueprint.Document.Fragment.Named{selections: fields} = node,
        _info,
        _fragments
      ) do
    %{node | complexity: sum_complexity(fields)}
  end

  # A not-yet-analyzed field: sum the children, then apply the schema node's
  # analyzer (or the default) via field_complexity/5.
  def handle_node(
        %Blueprint.Document.Field{
          complexity: nil,
          selections: fields,
          argument_data: args,
          schema_node: schema_node
        } = node,
        info,
        _fragments
      ) do
    # NOTE:
    # This really should be more nuanced. If this particular field's schema node
    # is a union type, right now the complexity of:
    # thisField {
    #   ... User { a b c}
    #   ... Dog { x y z }
    # }
    # would be the complexity of `|a, b, c, x, y, z|` despite the fact that it is
    # impossible for `a, b, c` to also happen with `x, y, z`
    #
    # However, if this schema node is an interface type things get complicated quickly.
    # You would have to evaluate the complexity for every possible type which can get
    # pretty unwieldy. For now, simple types it is.
    child_complexity = sum_complexity(fields)

    schema_node = %{
      schema_node
      | complexity: Type.function(schema_node, :complexity)
    }

    case field_complexity(schema_node, args, child_complexity, info, node) do
      complexity when is_integer(complexity) and complexity >= 0 ->
        %{node | complexity: complexity}

      other ->
        # Custom analyzers must yield a non-negative integer.
        raise Absinthe.AnalysisError, field_value_error(node, other)
    end
  end

  # An operation's complexity is the sum of its root selections.
  def handle_node(%Blueprint.Document.Operation{complexity: nil, selections: fields} = node, _, _) do
    %{node | complexity: sum_complexity(fields)}
  end

  def handle_node(node, _, _) do
    node
  end

  # No analyzer declared: default cost plus children.
  defp field_complexity(%{complexity: nil}, _, child_complexity, _, _) do
    @default_complexity + child_complexity
  end

  # 2-arity analyzer: receives (args, child_complexity).
  defp field_complexity(%{complexity: complexity}, arg, child_complexity, _, _)
       when is_function(complexity, 2) do
    complexity.(arg, child_complexity)
  end

  # 3-arity analyzer: additionally receives a %Complexity{} info struct.
  defp field_complexity(%{complexity: complexity}, arg, child_complexity, info, node)
       when is_function(complexity, 3) do
    info = struct(Complexity, Map.put(info, :definition, node))
    complexity.(arg, child_complexity, info)
  end

  # {module, function} analyzer, applied as (args, child_complexity, info).
  defp field_complexity(%{complexity: {mod, fun}}, arg, child_complexity, info, node) do
    info = struct(Complexity, Map.put(info, :definition, node))
    apply(mod, fun, [arg, child_complexity, info])
  end

  # A literal complexity value declared on the schema node.
  defp field_complexity(%{complexity: complexity}, _, _, _, _) do
    complexity
  end

  # Error text for an analyzer that returned something other than a
  # non-negative integer.
  defp field_value_error(field, value) do
    """
    Invalid value returned from complexity analyzer.
    Analyzing field:
    #{field.name}
    Defined at:
    #{field.schema_node.__reference__.location.file}:#{
      field.schema_node.__reference__.location.line
    }
    Got value:
    #{inspect(value)}
    The complexity value must be a non negative integer.
    """
  end

  defp sum_complexity(fields) do
    Enum.reduce(fields, 0, &sum_complexity/2)
  end

  # A child with nil complexity still contributes the default cost.
  defp sum_complexity(%{complexity: complexity}, acc) when is_nil(complexity) do
    @default_complexity + acc
  end

  defp sum_complexity(%{complexity: complexity}, acc) when is_integer(complexity) do
    complexity + acc
  end

  # Execution context data that's common to all fields
  defp info_boilerplate(bp_root, options) do
    %{
      context: options[:context] || %{},
      root_value: options[:root_value] || %{},
      schema: bp_root.schema
    }
  end
end
defmodule Absinthe.Phase.Document.Complexity.Result do
  @moduledoc false

  # Collects complexity errors into the result.
  #
  # Prewalks the current operation, flagging any node whose computed
  # complexity exceeds the configured maximum, and records the errors on
  # `execution.validation_errors`.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, options \\ []) do
    # No :max_complexity option means no limit.
    max = Keyword.get(options, :max_complexity, :infinity)

    operation = Blueprint.current_operation(input)
    fun = &handle_node(&1, max, &2)
    {operation, errors} = Blueprint.prewalk(operation, [], fun)
    blueprint = Blueprint.update_current(input, fn _ -> operation end)
    blueprint = put_in(blueprint.execution.validation_errors, errors)

    case {errors, Map.new(options)} do
      {[], _} ->
        {:ok, blueprint}

      # On error, optionally jump straight to the result phase.
      {_errors, %{jump_phases: true, result_phase: abort_phase}} ->
        {:jump, blueprint, abort_phase}

      _ ->
        {:error, blueprint}
    end
  end

  # Over budget: flag the node and accumulate the error. Note that when max
  # is :infinity (an atom), `integer > atom` is false under Erlang term
  # ordering, so this guard only trips for a numeric maximum.
  defp handle_node(%{complexity: complexity} = node, max, errors)
       when is_integer(complexity) and complexity > max do
    error = error(node, complexity, max)

    node =
      node
      |> flag_invalid(:too_complex)
      |> put_error(error)

    {node, [error | errors]}
  end

  # Within budget: halt the descent here. NOTE(review): this assumes a node
  # within budget need not have its children checked — holds when complexity
  # is the sum of child complexities; confirm for custom analyzers.
  defp handle_node(%{complexity: _} = node, _, errors) do
    {:halt, node, errors}
  end

  # Nodes without a complexity field pass straight through.
  defp handle_node(node, _, errors) do
    {node, errors}
  end

  defp error(%{source_location: location} = node, complexity, max) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node, complexity, max),
      locations: [location]
    }
  end

  def error_message(node, complexity, max) do
    "#{describe_node(node)} is too complex: complexity is #{complexity} and maximum is #{max}"
  end

  # Human-readable node description for error messages.
  defp describe_node(%Blueprint.Document.Operation{name: nil}) do
    "Operation"
  end

  defp describe_node(%Blueprint.Document.Operation{name: name}) do
    "Operation #{name}"
  end

  defp describe_node(%Blueprint.Document.Field{name: name}) do
    "Field #{name}"
  end

  defp describe_node(%Blueprint.Document.Fragment.Spread{name: name}) do
    "Spread #{name}"
  end
end
defmodule Absinthe.Phase.Document.Uses do
  @moduledoc false

  # Tracks uses of:
  # - Variables
  # - Fragments
  #
  # For every operation, records which named fragments and variables the
  # operation (transitively, through fragment spreads) makes use of.

  use Absinthe.Phase

  alias Absinthe.Blueprint

  @typep acc_t :: %{
           fragments_available: [Blueprint.Document.Fragment.Named.t()],
           fragments: [Blueprint.Document.Fragment.Named.Use.t()],
           variables: [Blueprint.Input.Variable.Use.t()]
         }

  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    ops = Enum.map(input.operations, &add_uses(&1, input))
    node = %{input | operations: ops}
    {:ok, node}
  end

  # Walk one operation collecting fragment/variable uses, then prepend them
  # to whatever uses the operation already carried.
  @spec add_uses(Blueprint.Document.Operation.t(), Blueprint.t()) ::
          Blueprint.Document.Operation.t()
  defp add_uses(%Blueprint.Document.Operation{} = node, doc) do
    acc = %{
      fragments_available: doc.fragments,
      fragments: [],
      variables: []
    }

    {_, acc} = Blueprint.prewalk(node, acc, &handle_use/2)

    %{
      node
      | fragment_uses: acc.fragments ++ node.fragment_uses,
        variable_uses: acc.variables ++ node.variable_uses
    }
  end

  # On a spread, record the referenced fragment (once) and recurse into it so
  # transitively-used fragments and variables are captured too.
  @spec handle_use(Blueprint.node_t(), acc_t) :: {Blueprint.node_t(), acc_t}
  defp handle_use(%Blueprint.Document.Fragment.Spread{} = node, acc) do
    if uses?(acc.fragments, node) do
      # Already recorded; skipping also guards against circular spreads.
      {node, acc}
    else
      target_fragment = Enum.find(acc.fragments_available, &(&1.name == node.name))

      if target_fragment do
        acc = acc |> put_use(target_fragment)
        {_, acc} = Blueprint.prewalk(target_fragment, acc, &handle_use/2)
        {node, acc}
      else
        # Unknown fragment name; reported by a separate validation phase.
        {node, acc}
      end
    end
  end

  defp handle_use(%Blueprint.Input.Variable{} = node, acc) do
    {node, put_use(acc, node)}
  end

  defp handle_use(node, acc) do
    {node, acc}
  end

  # Whether a use with the same name has already been recorded.
  #
  # FIX: previously used `Enum.find/2`, which returns the found element or
  # nil — violating the declared `boolean` spec. `Enum.any?/2` returns a real
  # boolean; the call site only tests truthiness, so behavior is unchanged.
  @spec uses?([Blueprint.use_t()], Blueprint.Document.Fragment.Spread.t()) :: boolean
  defp uses?(list, node) do
    Enum.any?(list, &(&1.name == node.name))
  end

  # Prepend the appropriate Use struct onto the accumulator.
  @spec put_use(acc_t, Blueprint.node_t()) :: acc_t
  defp put_use(acc, %Blueprint.Input.Variable{} = node) do
    ref = Blueprint.Input.Variable.to_use(node)
    update_in(acc.variables, &[ref | &1])
  end

  defp put_use(acc, %Blueprint.Document.Fragment.Named{} = node) do
    ref = Blueprint.Document.Fragment.Named.to_use(node)
    update_in(acc.fragments, &[ref | &1])
  end
end
defmodule Absinthe.Phase.Document.MissingVariables do
  @moduledoc false

  # Fills out missing arguments and input object fields.
  #
  # Filling out means inserting a stubbed `Input.Argument` or `Input.Field`
  # struct. Only those arguments which are non null and/or have a default
  # value are filled out. If an argument or input object field is non null
  # and missing, it is marked invalid.

  use Absinthe.Phase

  alias Absinthe.{Blueprint, Type}

  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node/1)}
  end

  # Arguments and input fields with no normalized value may need a default
  # filled in (or a :missing flag); everything else passes through.
  defp handle_node(%Blueprint.Input.Argument{input_value: %{normalized: nil}} = node) do
    handle_defaults(node, node.schema_node)
  end

  defp handle_node(%Blueprint.Input.Field{input_value: %{normalized: nil}} = node) do
    handle_defaults(node, node.schema_node)
  end

  defp handle_node(node), do: node

  defp handle_defaults(node, schema_node) do
    case schema_node do
      # A default exists: fill it in as a generated value.
      %{default_value: default} when not is_nil(default) ->
        fill_default(node, default)

      # Deprecated without a default: leave as-is.
      %{deprecation: %{}} ->
        node

      # Required but absent: flag for the validation phases.
      %{type: %Type.NonNull{}} ->
        flag_invalid(node, :missing)

      # Nullable with no default: nothing to do.
      _ ->
        node
    end
  end

  defp fill_default(%{input_value: input_value} = node, default) do
    generated = %Blueprint.Input.Generated{by: __MODULE__}
    %{node | input_value: %{input_value | data: default, normalized: generated}}
  end
end
defmodule Absinthe.Phase.Document.Execution.Resolution do
  @moduledoc false

  # Runs resolution functions in a blueprint.
  #
  # Blueprint results are placed under `blueprint.result.execution`. This is
  # because the results form basically a new tree from the original blueprint.

  alias Absinthe.{Blueprint, Type, Phase}
  alias Blueprint.{Result, Execution}
  # NOTE(review): redundant — Phase is already aliased by the line above.
  alias Absinthe.Phase

  use Absinthe.Phase

  # Resolve the current operation, if any; a document without one passes
  # through unchanged.
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(bp_root, options \\ []) do
    case Blueprint.current_operation(bp_root) do
      nil -> {:ok, bp_root}
      op -> resolve_current(bp_root, op, options)
    end
  end

  # After resolution, ask the schema's plugins whether additional pipeline
  # phases are needed (e.g. for async/batched fields); if so, signal
  # `{:insert, ...}` so the pipeline runs them.
  defp resolve_current(bp_root, operation, options) do
    execution = perform_resolution(bp_root, operation, options)

    blueprint = %{bp_root | execution: execution}

    if Keyword.get(options, :plugin_callbacks, true) do
      bp_root.schema.plugins()
      |> Absinthe.Plugin.pipeline(execution)
      |> case do
        [] ->
          {:ok, blueprint}

        pipeline ->
          {:insert, blueprint, pipeline}
      end
    else
      {:ok, blueprint}
    end
  end

  # Runs before/after plugin callbacks around the actual result walk.
  defp perform_resolution(bp_root, operation, options) do
    exec = Execution.get(bp_root, operation)

    plugins = bp_root.schema.plugins()
    run_callbacks? = Keyword.get(options, :plugin_callbacks, true)

    exec = plugins |> run_callbacks(:before_resolution, exec, run_callbacks?)

    {result, exec} =
      exec.result
      |> walk_result(operation, operation.schema_node, exec, [operation])
      |> propagate_null_trimming

    exec = plugins |> run_callbacks(:after_resolution, exec, run_callbacks?)

    %{exec | result: result}
  end

  # Thread the accumulator through each plugin's callback; no-op when
  # callbacks are disabled.
  defp run_callbacks(plugins, callback, acc, true) do
    Enum.reduce(plugins, acc, &apply(&1, callback, [&2]))
  end

  defp run_callbacks(_, _, acc, _), do: acc

  @doc """
  This function walks through any existing results. If no results are found at a
  given node, it will call the requisite function to expand and build those results
  """
  def walk_result(%{fields: nil} = result, bp_node, _schema_type, exec, path) do
    # Object with no resolved fields yet: resolve them now.
    {fields, exec} = resolve_fields(bp_node, exec, result.root_value, path)
    {%{result | fields: fields}, exec}
  end

  def walk_result(%{fields: fields} = result, bp_node, schema_type, exec, path) do
    # Object with existing fields: recurse into each one.
    {fields, exec} = walk_results(fields, bp_node, schema_type, exec, [0 | path], [])

    {%{result | fields: fields}, exec}
  end

  def walk_result(%Result.Leaf{} = result, _, _, exec, _) do
    # Leaves are already final.
    {result, exec}
  end

  def walk_result(%{values: values} = result, bp_node, schema_type, exec, path) do
    # List result: recurse into each item.
    {values, exec} = walk_results(values, bp_node, schema_type, exec, [0 | path], [])
    {%{result | values: values}, exec}
  end

  def walk_result(%Absinthe.Resolution{} = res, _bp_node, _schema_type, exec, _path) do
    # A suspended resolution: refresh its persisted fields and resume it.
    res = update_persisted_fields(res, exec)
    do_resolve_field(res, exec, res.source, res.path)
  end

  # walk list results, tracking the index in the path
  defp walk_results([value | values], bp_node, inner_type, exec, [i | sub_path] = path, acc) do
    {result, exec} = walk_result(value, bp_node, inner_type, exec, path)
    walk_results(values, bp_node, inner_type, exec, [i + 1 | sub_path], [result | acc])
  end

  defp walk_results([], _, _, exec, _, acc), do: {:lists.reverse(acc), exec}

  defp resolve_fields(parent, exec, source, path) do
    # parent is the parent field, we need to get the return type of that field
    # that return type could be an interface or union, so let's make it concrete
    parent
    |> get_return_type
    |> get_concrete_type(source, exec)
    |> case do
      nil ->
        # No concrete type could be determined; nothing to resolve.
        {[], exec}

      parent_type ->
        {fields, fields_cache} =
          Absinthe.Resolution.Projector.project(
            parent.selections,
            parent_type,
            path,
            exec.fields_cache,
            exec
          )

        exec = %{exec | fields_cache: fields_cache}

        do_resolve_fields(fields, exec, source, parent_type, path, [])
    end
  end

  # Unwrap the schema type a field (or bare type) resolves to.
  defp get_return_type(%{schema_node: %Type.Field{type: type}}) do
    Type.unwrap(type)
  end

  defp get_return_type(%{schema_node: schema_node}) do
    Type.unwrap(schema_node)
  end

  defp get_return_type(type), do: type

  # Abstract types (union/interface) are resolved against the runtime value;
  # concrete types pass through.
  defp get_concrete_type(%Type.Union{} = parent_type, source, exec) do
    Type.Union.resolve_type(parent_type, source, exec)
  end

  defp get_concrete_type(%Type.Interface{} = parent_type, source, exec) do
    Type.Interface.resolve_type(parent_type, source, exec)
  end

  defp get_concrete_type(parent_type, _source, _exec) do
    parent_type
  end

  defp do_resolve_fields([field | fields], exec, source, parent_type, path, acc) do
    {result, exec} = resolve_field(field, exec, source, parent_type, [field | path])
    do_resolve_fields(fields, exec, source, parent_type, path, [result | acc])
  end

  defp do_resolve_fields([], exec, _, _, _, acc), do: {:lists.reverse(acc), exec}

  def resolve_field(field, exec, source, parent_type, path) do
    exec
    |> build_resolution_struct(field, source, parent_type, path)
    |> do_resolve_field(exec, source, path)
  end

  # bp_field needs to have a concrete schema node, AKA no unions or interfaces
  defp do_resolve_field(res, exec, source, path) do
    res
    |> reduce_resolution
    |> case do
      %{state: :resolved} = res ->
        exec = update_persisted_fields(exec, res)
        build_result(res, exec, source, path)

      %{state: :suspended} = res ->
        # Return the suspended struct in place of a result; a later pass
        # resumes it (see the Absinthe.Resolution clause of walk_result/5).
        exec = update_persisted_fields(exec, res)
        {res, exec}

      final_res ->
        raise """
        Should have halted or suspended middleware
        Started with: #{inspect(res)}
        Ended with: #{inspect(final_res)}
        """
    end
  end

  # Copy the fields that must survive between resolution structs and the
  # execution record.
  defp update_persisted_fields(dest, %{acc: acc, context: context, fields_cache: cache}) do
    %{dest | acc: acc, context: context, fields_cache: cache}
  end

  defp build_resolution_struct(exec, bp_field, source, parent_type, path) do
    common =
      Map.take(exec, [:adapter, :context, :acc, :root_value, :schema, :fragments, :fields_cache])

    %Absinthe.Resolution{
      path: path,
      source: source,
      parent_type: parent_type,
      middleware: bp_field.schema_node.middleware,
      definition: bp_field,
      arguments: bp_field.argument_data
    }
    |> Map.merge(common)
  end

  # Run the middleware chain until it is exhausted or the resolution is
  # suspended.
  defp reduce_resolution(%{middleware: []} = res), do: res

  defp reduce_resolution(%{middleware: [middleware | remaining_middleware]} = res) do
    case call_middleware(middleware, %{res | middleware: remaining_middleware}) do
      %{state: :suspended} = res ->
        res

      res ->
        reduce_resolution(res)
    end
  end

  # Middleware may be specified as {{mod, fun}, opts}, {mod, opts}, a bare
  # module, or a 2-arity function.
  defp call_middleware({{mod, fun}, opts}, res) do
    apply(mod, fun, [res, opts])
  end

  defp call_middleware({mod, opts}, res) do
    apply(mod, :call, [res, opts])
  end

  defp call_middleware(mod, res) when is_atom(mod) do
    apply(mod, :call, [res, []])
  end

  defp call_middleware(fun, res) when is_function(fun, 2) do
    fun.(res, [])
  end

  # Turn a resolved resolution struct into a result node, attach errors, and
  # recurse into any child selections.
  defp build_result(%{errors: errors} = res, exec, source, path) do
    %{
      value: value,
      definition: bp_field,
      extensions: extensions
    } = res

    full_type = Type.expand(bp_field.schema_node.type, exec.schema)

    bp_field = put_in(bp_field.schema_node.type, full_type)

    # if there are any errors, the value is always nil
    value =
      case errors do
        [] -> value
        _ -> nil
      end

    errors = maybe_add_non_null_error(errors, value, full_type)

    value
    |> to_result(bp_field, full_type, extensions)
    |> add_errors(Enum.reverse(errors), &put_result_error_value(&1, &2, bp_field, source, path))
    |> walk_result(bp_field, full_type, exec, path)
    |> propagate_null_trimming
  end

  # A nil value for a non-null type gets an explicit error prepended.
  defp maybe_add_non_null_error(errors, nil, %Type.NonNull{}) do
    ["Cannot return null for non-nullable field" | errors]
  end

  defp maybe_add_non_null_error(errors, _, _) do
    errors
  end

  # Replace a node with a nil leaf when a non-null child (or list item)
  # violated its constraint, bubbling the child's errors upward.
  defp propagate_null_trimming({%{values: values} = node, exec}) do
    values = Enum.map(values, &do_propagate_null_trimming/1)
    node = %{node | values: values}
    {do_propagate_null_trimming(node), exec}
  end

  defp propagate_null_trimming({node, exec}) do
    {do_propagate_null_trimming(node), exec}
  end

  defp do_propagate_null_trimming(node) do
    if bad_child = find_bad_child(node) do
      bp_field = node.emitter

      full_type =
        with %{type: type} <- bp_field.schema_node do
          type
        end

      nil
      |> to_result(bp_field, full_type, node.extensions)
      |> Map.put(:errors, bad_child.errors)

      # ^ We don't have to worry about clobbering the current node's errors because,
      # if it had any errors, it wouldn't have any children and we wouldn't be
      # here anyway.
    else
      node
    end
  end

  defp find_bad_child(%{fields: fields}) do
    Enum.find(fields, &non_null_violation?/1)
  end

  defp find_bad_child(%{values: values}) do
    Enum.find(values, &non_null_list_violation?/1)
  end

  defp find_bad_child(_) do
    false
  end

  # FIXME: Not super happy with this lookup process
  defp non_null_violation?(%{value: nil, emitter: %{schema_node: %{type: %Type.NonNull{}}}}) do
    true
  end

  defp non_null_violation?(_) do
    false
  end

  # FIXME: Not super happy with this lookup process.
  # Also it would be nice if we could use the same function as above.
  defp non_null_list_violation?(%{
         value: nil,
         emitter: %{schema_node: %{type: %Type.List{of_type: %Type.NonNull{}}}}
       }) do
    true
  end

  defp non_null_list_violation?(_) do
    false
  end

  # defp maybe_add_non_null_error(errors, nil, %)

  defp add_errors(result, errors, fun) do
    Enum.reduce(errors, result, fun)
  end

  # Normalize a user-supplied error value onto the result; raises when the
  # value carries no :message.
  defp put_result_error_value(error_value, result, bp_field, source, path) do
    case split_error_value(error_value) do
      {[], _} ->
        raise Absinthe.Resolution.result_error(error_value, bp_field, source)

      {[message: message], extra} ->
        put_error(result, error(bp_field, message, path, Map.new(extra)))
    end
  end

  # Split an error value into its :message and everything else (extras).
  defp split_error_value(error_value) when is_list(error_value) or is_map(error_value) do
    Keyword.split(Enum.to_list(error_value), [:message])
  end

  defp split_error_value(error_value) when is_binary(error_value) do
    {[message: error_value], []}
  end

  defp split_error_value(error_value) do
    {[message: to_string(error_value)], []}
  end

  # Build the appropriate Result struct for a value of the given type.
  defp to_result(nil, blueprint, _, extensions) do
    %Result.Leaf{emitter: blueprint, value: nil, extensions: extensions}
  end

  defp to_result(root_value, blueprint, %Type.NonNull{of_type: inner_type}, extensions) do
    to_result(root_value, blueprint, inner_type, extensions)
  end

  defp to_result(root_value, blueprint, %Type.Object{}, extensions) do
    %Result.Object{root_value: root_value, emitter: blueprint, extensions: extensions}
  end

  defp to_result(root_value, blueprint, %Type.Interface{}, extensions) do
    %Result.Object{root_value: root_value, emitter: blueprint, extensions: extensions}
  end

  defp to_result(root_value, blueprint, %Type.Union{}, extensions) do
    %Result.Object{root_value: root_value, emitter: blueprint, extensions: extensions}
  end

  defp to_result(root_value, blueprint, %Type.List{of_type: inner_type}, extensions) do
    values =
      root_value
      |> List.wrap()
      |> Enum.map(&to_result(&1, blueprint, inner_type, extensions))

    %Result.List{values: values, emitter: blueprint, extensions: extensions}
  end

  defp to_result(root_value, blueprint, %Type.Scalar{}, extensions) do
    %Result.Leaf{
      emitter: blueprint,
      value: root_value,
      extensions: extensions
    }
  end

  defp to_result(root_value, blueprint, %Type.Enum{}, extensions) do
    %Result.Leaf{
      emitter: blueprint,
      value: root_value,
      extensions: extensions
    }
  end

  def error(node, message, path, extra) do
    %Phase.Error{
      phase: __MODULE__,
      message: message,
      locations: [node.source_location],
      path: Absinthe.Resolution.path(%{path: path}),
      extra: extra
    }
  end
end
defmodule Absinthe.Phase.Document.Result do
  @moduledoc false

  # Produces data fit for external encoding from annotated value tree

  alias Absinthe.{Blueprint, Phase, Type}

  use Absinthe.Phase

  @spec run(Blueprint.t() | Phase.Error.t(), Keyword.t()) :: {:ok, map}
  def run(%Blueprint{} = bp, _options \\ []) do
    result = Map.merge(bp.result, process(bp))
    {:ok, %{bp | result: result}}
  end

  # Validation failures short-circuit; otherwise fold the execution result
  # tree into {data, errors}.
  defp process(blueprint) do
    result =
      case blueprint.execution do
        %{validation_errors: [], result: result} ->
          {:ok, data(result, [])}

        %{validation_errors: errors} ->
          {:validation_failed, errors}
      end

    format_result(result)
  end

  defp format_result(:execution_failed) do
    %{data: nil}
  end

  defp format_result({:ok, {data, []}}) do
    %{data: data}
  end

  defp format_result({:ok, {data, errors}}) do
    errors = errors |> Enum.uniq() |> Enum.map(&format_error/1)
    %{data: data, errors: errors}
  end

  defp format_result({:validation_failed, errors}) do
    errors = errors |> Enum.uniq() |> Enum.map(&format_error/1)
    %{errors: errors}
  end

  defp format_result({:parse_failed, error}) do
    %{errors: [format_error(error)]}
  end

  # A node carrying field errors yields nil data; the errors bubble upward.
  defp data(%{errors: [_ | _] = field_errors}, errors), do: {nil, field_errors ++ errors}

  # Leaf
  defp data(%{value: nil}, errors), do: {nil, errors}

  defp data(%{value: value, emitter: emitter}, errors) do
    # Serialize the leaf according to its (unwrapped) schema type.
    value =
      case Type.unwrap(emitter.schema_node.type) do
        %Type.Scalar{} = schema_node ->
          Type.Scalar.serialize(schema_node, value)

        %Type.Enum{} = schema_node ->
          Type.Enum.serialize(schema_node, value)
      end

    {value, errors}
  end

  # Object
  defp data(%{fields: fields}, errors), do: field_data(fields, errors)

  # List
  defp data(%{values: values}, errors), do: list_data(values, errors)

  defp list_data(fields, errors, acc \\ [])
  defp list_data([], errors, acc), do: {:lists.reverse(acc), errors}

  defp list_data([%{errors: errs} = field | fields], errors, acc) do
    {value, errors} = data(field, errors)
    list_data(fields, errs ++ errors, [value | acc])
  end

  defp field_data(fields, errors, acc \\ [])
  defp field_data([], errors, acc), do: {Map.new(acc), errors}

  # A lingering Absinthe.Resolution struct means resolution never finished.
  defp field_data([%Absinthe.Resolution{} = res | _], _errors, _acc) do
    raise """
    Found unresolved resolution struct!
    You probably forgot to run the resolution phase again.
    #{inspect(res)}
    """
  end

  defp field_data([field | fields], errors, acc) do
    {value, errors} = data(field, errors)
    field_data(fields, errors, [{field_name(field.emitter), value} | acc])
  end

  # Prefer the field alias when one was given.
  defp field_name(%{alias: nil, name: name}), do: name
  defp field_name(%{alias: name}), do: name
  defp field_name(%{name: name}), do: name

  defp format_error(%Phase.Error{locations: []} = error) do
    error_object = %{message: error.message}
    Map.merge(error.extra, error_object)
  end

  defp format_error(%Phase.Error{} = error) do
    error_object = %{
      message: error.message,
      locations: Enum.flat_map(error.locations, &format_location/1)
    }

    # Only include a :path key when a path exists.
    error_object =
      case error.path do
        [] -> error_object
        path -> Map.put(error_object, :path, path)
      end

    Map.merge(Map.new(error.extra), error_object)
  end

  defp format_location(%{line: line, column: col}) do
    [%{line: line || 0, column: col || 0}]
  end

  defp format_location(_), do: []
end
defmodule Absinthe.Phase.Document.Context do
  @moduledoc "Pass on context and root value to document."

  use Absinthe.Phase

  alias Absinthe.Blueprint

  # Merge the :context and :root_value options into the blueprint's
  # execution record; option values win over existing keys.
  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(blueprint, options \\ []) do
    execution = blueprint.execution

    context = Map.merge(execution.context, Keyword.get(options, :context) || %{})
    root_value = Map.merge(execution.root_value, Keyword.get(options, :root_value) || %{})

    {:ok, %{blueprint | execution: %{execution | context: context, root_value: root_value}}}
  end
end
defmodule Absinthe.Phase.Document.Directives do
  @moduledoc false

  # Expand all directives in the document.
  #
  # Note that no validation occurs in this phase.

  use Absinthe.Phase

  alias Absinthe.Blueprint

  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node/1)}
  end

  # Any node carrying directives has each one expanded onto it, in order;
  # all other nodes pass through untouched.
  @spec handle_node(Blueprint.node_t()) :: Blueprint.node_t()
  defp handle_node(%{directives: directives} = node) do
    Enum.reduce(directives, node, &Blueprint.Directive.expand/2)
  end

  defp handle_node(node), do: node
end
defmodule Absinthe.Phase.Document.CurrentOperation do
  @moduledoc false

  # Selects the current operation.
  #
  # - If an operation name is given, the matching operation is marked as current.
  # - If no operation name is provided and there is only one operation, it is
  #   set as current.
  #
  # Note that no validation occurs in this phase.

  use Absinthe.Phase

  alias Absinthe.Blueprint

  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, options \\ []) do
    operations = process(input.operations, Map.new(options))
    {:ok, %{input | operations: operations}}
  end

  # Exactly one operation and no requested name: it is current.
  defp process([op], %{operation_name: nil}), do: [mark_current(op)]

  # Exactly one operation whose name matches the requested name.
  defp process([%{name: name} = op], %{operation_name: name}), do: [mark_current(op)]

  # Otherwise mark only the operations matching the requested name.
  defp process(ops, %{operation_name: name}) do
    Enum.map(ops, fn
      %{name: ^name} = op -> mark_current(op)
      op -> op
    end)
  end

  # No :operation_name option supplied at all: leave operations untouched.
  defp process(ops, _), do: ops

  defp mark_current(op), do: %{op | current: true}
end
defmodule Absinthe.Phase.Document.Arguments.Normalize do
  @moduledoc false

  # Populate all arguments in the document with their provided values:
  #
  # - A literal argument value becomes the `Input.Value`'s `normalized` field.
  # - A variable argument is replaced with the value supplied for that
  #   variable (requires the `Phase.Document.Variables` phase to have run).
  #
  # Note that no validation occurs in this phase.

  use Absinthe.Phase

  alias Absinthe.Blueprint
  alias Absinthe.Blueprint.Input

  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    provided_values = get_provided_values(input)
    {:ok, Blueprint.prewalk(input, &handle_node(&1, provided_values))}
  end

  # The variable values supplied for the current operation (empty when there
  # is no current operation).
  @spec get_provided_values(Blueprint.t()) :: map
  defp get_provided_values(input) do
    if operation = Blueprint.current_operation(input) do
      operation.provided_values
    else
      %{}
    end
  end

  # Variable usage: substitute the value provided for the variable (if any).
  defp handle_node(%Input.RawValue{content: %Input.Variable{name: var_name}} = node, provided) do
    %Input.Value{
      normalized: Map.get(provided, var_name),
      raw: node
    }
  end

  # Literal usage: the normalized value is the raw content itself.
  defp handle_node(%Input.RawValue{content: content} = node, _provided) do
    %Input.Value{
      normalized: content,
      raw: node
    }
  end

  defp handle_node(node, _provided), do: node
end
defmodule Absinthe.Phase.Document.Arguments.FlagInvalid do
  @moduledoc false

  # Marks arguments as bad if they have any invalid children.
  #
  # This is later used by the ArgumentsOfCorrectType phase.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase

  @doc """
  Run this validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.postwalk(input, &handle_node/1)}
  end

  # Input with no matching schema node is extraneous.
  defp handle_node(%{schema_node: nil, flags: %{}} = node) do
    flag_invalid(node, :extra)
  end

  defp handle_node(%Blueprint.Input.Argument{} = node) do
    check_child(node, node.input_value.normalized, :bad_argument)
  end

  defp handle_node(%Blueprint.Input.Field{} = node) do
    check_child(node, node.input_value.normalized, :bad_field)
  end

  defp handle_node(%Blueprint.Input.List{} = node) do
    check_children(node, Enum.map(node.items, & &1.normalized), :bad_list)
  end

  defp handle_node(%Blueprint.Input.Object{} = node) do
    check_children(node, node.fields, :bad_object)
  end

  defp handle_node(node), do: node

  # Flag the parent when its single child was marked invalid.
  defp check_child(node, %{flags: %{invalid: _}}, flag), do: flag_invalid(node, flag)
  defp check_child(node, _, _), do: node

  # Flag the parent when any of its children were marked invalid.
  defp check_children(node, children, flag) do
    if Enum.any?(children, &match?(%{flags: %{invalid: _}}, &1)) do
      flag_invalid(node, flag)
    else
      node
    end
  end
end
defmodule Absinthe.Phase.Document.Arguments.CoerceEnums do
  @moduledoc false

  # Coerce variable string inputs to enums when appropriate.
  #
  # A literal enum like `foo(arg: ENUM)` is parsed as an `Input.Enum` struct,
  # but a variable usage `foo(arg: $enumVar)` arrives as an `Input.String`
  # because the variable handler does not yet know the schema type. This
  # phase retags such strings as enum inputs when the schema expects an enum.

  use Absinthe.Phase

  alias Absinthe.{Blueprint, Type}
  alias Absinthe.Blueprint.Input

  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &coerce_node/1)}
  end

  # Only values that arrived via a variable need coercion.
  defp coerce_node(%Input.Value{raw: %{content: %Input.Variable{}}} = node) do
    coerced =
      Blueprint.prewalk(node, fn
        %Input.String{} = input ->
          # Retag the struct as an enum input when the schema expects an enum.
          if match?(%Type.Enum{}, Type.unwrap(input.schema_node)) do
            Map.put(input, :__struct__, Input.Enum)
          else
            input
          end

        other ->
          other
      end)

    # Subtree fully handled; the outer walk need not descend again.
    {:halt, coerced}
  end

  defp coerce_node(node), do: node
end
defmodule Absinthe.Phase.Document.Arguments.CoerceLists do
  @moduledoc false

  # Coerce non-list inputs to lists when appropriate.
  #
  # IE
  # ```
  # foo(ids: 1)
  # ```
  # becomes
  # ```
  # foo(ids: [1])
  # ```
  #
  # if `ids` is a list type.

  use Absinthe.Phase

  alias Absinthe.{Blueprint, Type}
  alias Absinthe.Blueprint.Input

  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &coerce_node/1)}
  end

  # Nothing provided, or an explicit null: leave untouched.
  defp coerce_node(%Input.Value{normalized: nil} = node), do: node
  defp coerce_node(%Input.Value{normalized: %Input.Null{}} = node), do: node

  defp coerce_node(%Input.Value{} = node) do
    # Wrap a bare value when the schema expects a list (looking through any
    # NonNull wrapper).
    if match?(%Type.List{}, Type.unwrap_non_null(node.schema_node)) do
      %{node | normalized: Input.List.wrap(node.normalized, node.schema_node)}
    else
      node
    end
  end

  defp coerce_node(node), do: node
end
defmodule Absinthe.Phase.Document.Arguments.Parse do
  @moduledoc false

  # Parses Leaf Node inputs
  #
  # Runs the scalar/enum parse functions over normalized input values,
  # storing the parsed result in `Input.Value.data` or flagging the
  # normalized node as invalid on failure.

  alias Absinthe.Blueprint.Input
  alias Absinthe.{Blueprint, Type}

  use Absinthe.Phase

  def run(input, options \\ []) do
    result = Blueprint.prewalk(input, &handle_node(&1, options[:context] || %{}))
    {:ok, result}
  end

  # No schema node: nothing to parse against; stop descending.
  defp handle_node(%{schema_node: nil} = node, _context) do
    {:halt, node}
  end

  # No normalized value to parse.
  defp handle_node(%{normalized: nil} = node, _context) do
    node
  end

  defp handle_node(%Input.Value{normalized: normalized} = node, context) do
    case build_value(normalized, node.schema_node, context) do
      {:ok, value} ->
        %{node | data: value}

      # Lists/objects are handled elsewhere; leave the node unchanged.
      :not_leaf_node ->
        node

      {:error, flag} ->
        %{node | normalized: normalized |> flag_invalid(flag)}
    end
  end

  defp handle_node(node, _context), do: node

  # Clause order matters: an explicit null against a non-null type is an
  # error before any unwrapping happens.
  defp build_value(%Input.Null{}, %Type.NonNull{}, _) do
    {:error, :non_null}
  end

  defp build_value(normalized, %Type.Scalar{} = schema_node, context) do
    case Type.Scalar.parse(schema_node, normalized, context) do
      :error ->
        {:error, :bad_parse}

      {:ok, val} ->
        {:ok, val}
    end
  end

  # An explicit null is a valid (nil) enum value.
  defp build_value(%Input.Null{}, %Type.Enum{}, _) do
    {:ok, nil}
  end

  defp build_value(normalized, %Type.Enum{} = schema_node, _) do
    case Type.Enum.parse(schema_node, normalized) do
      {:ok, %{value: value}} ->
        {:ok, value}

      :error ->
        {:error, :bad_parse}
    end
  end

  # Unwrap non-null and parse against the inner type.
  defp build_value(normalized, %Type.NonNull{of_type: inner_type}, context) do
    build_value(normalized, inner_type, context)
  end

  defp build_value(_, _, _) do
    :not_leaf_node
  end
end
defmodule Absinthe.Phase.Document.Arguments.Data do
  @moduledoc false

  # Populate all arguments in the document with their provided data values:
  #
  # - If valid data is available for an argument, set the `Argument.t`'s
  #   `data_value` field to that value.
  # - If no valid data is available for an argument, set the `Argument.t`'s
  #   `data_value` to `nil`.
  # - When determining the value of the argument, mark any invalid nodes
  #   in the `Argument.t`'s `normalized_value` tree with `:invalid` and a
  #   reason.
  # - If non-null arguments are not provided (eg, a `Argument.t` is missing
  #   from `normalized_value`), add a stub `Argument.t` and flag it as
  #   `:invalid` and `:missing`.
  # - If non-null input fields are not provided (eg, an `Input.Field.t` is
  #   missing from `normalized_value`), add a stub `Input.Field.t` and flag it as
  #   `:invalid` and `:missing`.
  #
  # Note that the limited validation that occurs in this phase is limited to
  # setting the `data_value` to `nil`, adding flags to the `normalized_value`,
  # and building stub fields/arguments when missing values are required. Actual
  # addition of errors is handled by validation phases.

  alias Absinthe.Blueprint.Input
  alias Absinthe.{Blueprint}

  use Absinthe.Phase

  def run(input, _options \\ []) do
    # By using a postwalk we can worry about leaf nodes first (scalars, enums),
    # and then for list and objects merely grab the data values.
    result = Blueprint.postwalk(input, &handle_node/1)
    {:ok, result}
  end

  def handle_node(%Blueprint.Document.Field{arguments: []} = node) do
    node
  end

  # Collapse a field's arguments into the map used at resolution time.
  def handle_node(%Blueprint.Document.Field{arguments: args} = node) do
    %{node | argument_data: Input.Argument.value_map(args)}
  end

  def handle_node(%Input.Argument{input_value: input} = node) do
    %{node | value: input.data}
  end

  # List values: keep only the data of items that are valid.
  def handle_node(%Input.Value{normalized: %Input.List{items: items}} = node) do
    data_list = for %{data: data} = item <- items, Input.Value.valid?(item), do: data
    %{node | data: data_list}
  end

  # Object values: build a map keyed by each field's schema identifier.
  def handle_node(%Input.Value{normalized: %Input.Object{fields: fields}} = node) do
    data =
      for field <- fields, include_field?(field), into: %{} do
        {field.schema_node.identifier, field.input_value.data}
      end

    %{node | data: data}
  end

  def handle_node(node) do
    node
  end

  # An explicit null is included; a field whose data is nil (missing or
  # invalid) is excluded.
  defp include_field?(%{input_value: %{normalized: %Input.Null{}}}), do: true
  defp include_field?(%{input_value: %{data: nil}}), do: false
  defp include_field?(_), do: true
end
defmodule Absinthe.Phase.Document.Validation.OnlyOneSubscription do
  @moduledoc false

  # Validates a document to ensure that any subscription operation selects
  # exactly one field on the root subscription object.
  # (The previous comment here described variable validation and did not match
  # this module's behavior.)

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    bp =
      Blueprint.update_current(input, fn
        %{type: :subscription} = op ->
          check_op(op)

        op ->
          op
      end)

    {:ok, bp}
  end

  # Two or more root selections on a subscription: flag and attach the error.
  defp check_op(%{selections: [_, _ | _]} = op) do
    error = %Phase.Error{
      phase: __MODULE__,
      message: "Only one field is permitted on the root object when subscribing",
      locations: [op.source_location]
    }

    op
    |> flag_invalid(:too_many_fields)
    |> put_error(error)
  end

  defp check_op(op), do: op
end
defmodule Absinthe.Phase.Document.Validation.UniqueVariableNames do
  @moduledoc false

  # Validates a document to ensure that all variable definitions for an
  # operation have unique names.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    checked_operations =
      Enum.map(input.operations, fn operation ->
        definitions =
          Enum.map(
            operation.variable_definitions,
            &process(&1, operation.variable_definitions)
          )

        %{operation | variable_definitions: definitions}
      end)

    {:ok, %{input | operations: checked_operations}}
  end

  # Flag a variable definition when its name is shared with a sibling.
  @spec process(Blueprint.Document.VariableDefinition.t(), [
          Blueprint.Document.VariableDefinition.t()
        ]) :: Blueprint.Document.VariableDefinition.t()
  defp process(variable_definition, variable_definitions) do
    case duplicate?(variable_definitions, variable_definition) do
      true ->
        variable_definition
        |> flag_invalid(:duplicate_name)
        |> put_error(error(variable_definition))

      false ->
        variable_definition
    end
  end

  # A definition is a duplicate when its name occurs more than once.
  @spec duplicate?(
          [Blueprint.Document.VariableDefinition.t()],
          Blueprint.Document.VariableDefinition.t()
        ) :: boolean
  defp duplicate?(variable_definitions, variable_definition) do
    occurrences = Enum.count(variable_definitions, &(&1.name == variable_definition.name))
    occurrences > 1
  end

  # Build the error for a duplicate variable definition.
  @spec error(Blueprint.Document.VariableDefinition.t()) :: Phase.Error.t()
  defp error(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node.name),
      locations: [node.source_location]
    }
  end

  @doc """
  Generate an error message for a duplicate variable definition.
  """
  @spec error_message(String.t()) :: String.t()
  def error_message(name) do
    ~s(There can only be one variable named "#{name}".)
  end
end
defmodule Absinthe.Phase.Document.Validation.ArgumentsOfCorrectType do
  @moduledoc false

  # Validates document to ensure that all arguments are of the correct type.

  alias Absinthe.{Blueprint, Phase, Schema, Type}

  use Absinthe.Phase

  @doc """
  Run this validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    result = Blueprint.prewalk(input, &handle_node(&1, input.schema))
    {:ok, result}
  end

  # Check arguments, objects, fields, and lists
  @spec handle_node(Blueprint.node_t(), Schema.t()) :: Blueprint.node_t()

  # handled by Phase.Document.Validation.KnownArgumentNames
  defp handle_node(%Blueprint.Input.Argument{schema_node: nil} = node, _schema) do
    {:halt, node}
  end

  # handled by Phase.Document.Validation.ProvidedNonNullArguments
  defp handle_node(%Blueprint.Input.Argument{input_value: %{normalized: nil}} = node, _schema) do
    {:halt, node}
  end

  # An argument already flagged invalid: attach a single error summarizing
  # every problem found in its input-value tree, then halt the walk below
  # this node so descendants don't produce duplicate errors.
  defp handle_node(%Blueprint.Input.Argument{flags: %{invalid: _}} = node, schema) do
    descendant_errors = collect_child_errors(node.input_value, schema)

    message =
      error_message(
        node.name,
        Blueprint.Input.inspect(node.input_value),
        descendant_errors
      )

    error = error(node, message)

    node = node |> put_error(error)
    {:halt, node}
  end

  defp handle_node(node, _) do
    node
  end

  # Collect verbose messages for each invalid element of a list value,
  # recursing into children. Indexes are 0-based here; the rendered message
  # is 1-based (see `value_error_message/3`).
  defp collect_child_errors(%Blueprint.Input.List{} = node, schema) do
    node.items
    |> Enum.map(& &1.normalized)
    |> Enum.with_index()
    |> Enum.flat_map(fn
      {%{schema_node: nil} = child, _} ->
        collect_child_errors(child, schema)

      {%{flags: %{invalid: _}} = child, idx} ->
        child_type_name =
          child.schema_node
          |> Type.value_type(schema)
          |> Type.name(schema)

        child_inspected_value = Blueprint.Input.inspect(child)

        [
          value_error_message(idx, child_type_name, child_inspected_value)
          | collect_child_errors(child, schema)
        ]

      {child, _} ->
        collect_child_errors(child, schema)
    end)
  end

  # Collect verbose messages for each invalid field of an input object.
  defp collect_child_errors(%Blueprint.Input.Object{} = node, schema) do
    node.fields
    |> Enum.flat_map(fn
      %{flags: %{invalid: _}, schema_node: nil} = child ->
        [unknown_field_error_message(child.name)]

      %{flags: %{invalid: _}} = child ->
        child_type_name =
          Type.value_type(child.schema_node, schema)
          |> Type.name(schema)

        # Scalars and enums are leaves: their parse failure is the whole
        # story, so don't recurse into them.
        child_errors =
          case child.schema_node do
            %Type.Scalar{} -> []
            %Type.Enum{} -> []
            _ -> collect_child_errors(child.input_value, schema)
          end

        child_inspected_value = Blueprint.Input.inspect(child.input_value)

        [
          value_error_message(child.name, child_type_name, child_inspected_value)
          | child_errors
        ]

      child ->
        collect_child_errors(child.input_value.normalized, schema)
    end)
  end

  # Unwrap an Input.Value container to its normalized node.
  defp collect_child_errors(%Blueprint.Input.Value{normalized: norm}, schema) do
    collect_child_errors(norm, schema)
  end

  defp collect_child_errors(_node, _) do
    []
  end

  # Generate the error for the node
  @spec error(Blueprint.node_t(), String.t()) :: Phase.Error.t()
  defp error(node, message) do
    %Phase.Error{
      phase: __MODULE__,
      message: message,
      locations: [node.source_location]
    }
  end

  @doc """
  Top-level message for an invalid argument, optionally followed by one
  verbose line per descendant problem.
  """
  def error_message(arg_name, inspected_value, verbose_errors \\ [])

  def error_message(arg_name, inspected_value, []) do
    ~s(Argument "#{arg_name}" has invalid value #{inspected_value}.)
  end

  def error_message(arg_name, inspected_value, verbose_errors) do
    error_message(arg_name, inspected_value) <> "\n" <> Enum.join(verbose_errors, "\n")
  end

  @doc """
  Message fragment for a bad list element (integer `id`, 0-based, rendered
  1-based) or a bad object field (binary `id`).
  """
  def value_error_message(id, expected_type_name, inspected_value) when is_integer(id) do
    ~s(In element ##{id + 1}: ) <>
      expected_type_error_message(expected_type_name, inspected_value)
  end

  def value_error_message(id, expected_type_name, inspected_value) do
    ~s(In field "#{id}": ) <> expected_type_error_message(expected_type_name, inspected_value)
  end

  @doc """
  Message fragment for a field not defined on the input object type.
  """
  def unknown_field_error_message(field_name) do
    ~s(In field "#{field_name}": Unknown field.)
  end

  defp expected_type_error_message(expected_type_name, inspected_value) do
    ~s(Expected type "#{expected_type_name}", found #{inspected_value}.)
  end
end
defmodule Absinthe.Phase.Document.Validation.ProvidedAnOperation do
  @moduledoc false

  # Validates a document to ensure that at least one operation is given.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, options \\ []) do
    result = handle_node(input)
    opts = Map.new(options)

    # Jump straight to the result phase only when the document had no
    # operations AND the pipeline asked for jumping on validation failure.
    with %{flags: %{no_operations: _}} <- result,
         %{jump_phases: true, validation_result_phase: abort_phase} <- opts do
      {:jump, result, abort_phase}
    else
      _ -> {:ok, result}
    end
  end

  # Flag the blueprint when it contains no operations at all.
  @spec handle_node(Blueprint.t()) :: Blueprint.t()
  defp handle_node(%Blueprint{operations: []} = node) do
    node
    |> flag_invalid(:no_operations)
    |> put_error(error())
  end

  defp handle_node(node), do: node

  @doc """
  Generate an error message for the validation.
  """
  @spec error_message() :: String.t()
  def error_message do
    "No operations provided."
  end

  # Build the error for the node.
  @spec error() :: Phase.Error.t()
  defp error do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message()
    }
  end
end
defmodule Absinthe.Phase.Document.Validation.UniqueOperationNames do
  @moduledoc false

  # Validates a document to ensure that all operations have unique names.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    checked = Enum.map(input.operations, &process(&1, input.operations))
    {:ok, %{input | operations: checked}}
  end

  @spec process(Blueprint.Document.Operation.t(), [Blueprint.Document.Operation.t()]) ::
          Blueprint.Document.Operation.t()
  # Anonymous operations are exempt from this check.
  defp process(%{name: nil} = operation, _) do
    operation
  end

  defp process(operation, operations) do
    case duplicate?(operations, operation) do
      true ->
        operation
        |> flag_invalid(:duplicate_name)
        |> put_error(error(operation))

      false ->
        operation
    end
  end

  # An operation is a duplicate when its name occurs more than once.
  @spec duplicate?([Blueprint.Document.Operation.t()], Blueprint.Document.Operation.t()) ::
          boolean
  defp duplicate?(operations, operation) do
    Enum.count(operations, &(&1.name == operation.name)) > 1
  end

  # Build the error for a duplicate operation.
  @spec error(Blueprint.Document.Operation.t()) :: Phase.Error.t()
  defp error(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node.name),
      locations: [node.source_location]
    }
  end

  @doc """
  Generate an error message for a duplicate operation.
  """
  @spec error_message(String.t()) :: String.t()
  def error_message(name) do
    ~s(There can only be one operation named "#{name}".)
  end
end
defmodule Absinthe.Phase.Document.Validation.UniqueFragmentNames do
  @moduledoc false

  # Validates a document to ensure that all named fragments have unique names.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    checked = Enum.map(input.fragments, &process(&1, input.fragments))
    {:ok, %{input | fragments: checked}}
  end

  # Flag a fragment when its name is shared with another fragment.
  @spec process(Blueprint.Document.Fragment.Named.t(), [Blueprint.Document.Fragment.Named.t()]) ::
          Blueprint.Document.Fragment.Named.t()
  defp process(fragment, fragments) do
    case duplicate?(fragments, fragment) do
      true ->
        fragment
        |> flag_invalid(:duplicate_name)
        |> put_error(error(fragment))

      false ->
        fragment
    end
  end

  # A fragment is a duplicate when its name occurs more than once.
  @spec duplicate?([Blueprint.Document.Fragment.Named.t()], Blueprint.Document.Fragment.Named.t()) ::
          boolean
  defp duplicate?(fragments, fragment) do
    Enum.count(fragments, &(&1.name == fragment.name)) > 1
  end

  # Build the error for a duplicate fragment.
  @spec error(Blueprint.Document.Fragment.Named.t()) :: Phase.Error.t()
  defp error(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node.name),
      locations: [node.source_location]
    }
  end

  @doc """
  Generate an error message for a duplicate fragment.
  """
  @spec error_message(String.t()) :: String.t()
  def error_message(name) do
    ~s(There can only be one fragment named "#{name}".)
  end
end
defmodule Absinthe.Phase.Document.Validation.KnownFragmentNames do
  @moduledoc false

  # Validates a document to ensure that fragment spreads only reference
  # named fragments that actually exist in the document.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node(&1, input))}
  end

  # Flag any spread whose target fragment is not defined in the document.
  @spec handle_node(Blueprint.node_t(), Blueprint.t()) :: Blueprint.node_t()
  defp handle_node(%Blueprint.Document.Fragment.Spread{} = node, blueprint) do
    if Blueprint.fragment(blueprint, node.name) do
      node
    else
      node
      |> flag_invalid(:bad_fragment_name)
      |> put_error(error(node))
    end
  end

  defp handle_node(node, _), do: node

  # Build the error for the node.
  @spec error(Blueprint.node_t()) :: Phase.Error.t()
  defp error(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: ~s(Unknown fragment "#{node.name}"),
      locations: [node.source_location]
    }
  end
end
defmodule Absinthe.Phase.Document.Validation.NoUnusedFragments do
  @moduledoc false

  # Validates a document to ensure that all named fragments are used.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node(&1, input.operations))}
  end

  # Flag any named fragment that no operation ever spreads.
  def handle_node(%Blueprint.Document.Fragment.Named{} = node, operations) do
    case uses?(node, operations) do
      true ->
        node

      false ->
        node
        |> flag_invalid(:not_used)
        |> put_error(error(node))
    end
  end

  def handle_node(node, _), do: node

  # Whether any operation in the document makes use of this fragment.
  @spec uses?(Blueprint.Document.Fragment.Named.t(), [Blueprint.Document.Operation.t()]) ::
          boolean
  defp uses?(node, operations) do
    Enum.any?(operations, &Blueprint.Document.Operation.uses?(&1, node))
  end

  # Build the error for the node.
  @spec error(Blueprint.Document.Fragment.Named.t()) :: Phase.Error.t()
  defp error(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node.name),
      locations: [node.source_location]
    }
  end

  @doc """
  Generate an error message for an unused fragment.
  """
  @spec error_message(String.t()) :: String.t()
  def error_message(name) do
    ~s(Fragment "#{name}" is never used.)
  end
end
defmodule Absinthe.Phase.Document.Validation.NoFragmentCycles do
  @moduledoc false

  # Ensure that document doesn't have any fragment cycles that could
  # result in a loop during execution.
  #
  # Note that if this phase fails, an error should immediately be given to
  # the user.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, options \\ []) do
    do_run(input, Map.new(options))
  end

  # If any cycle was found, jump straight to `abort_phase` rather than
  # continuing the pipeline with a document that could loop forever.
  @spec do_run(Blueprint.t(), %{validation_result_phase: Phase.t()}) :: Phase.result_t()
  def do_run(input, %{validation_result_phase: abort_phase}) do
    {fragments, error_count} = check(input.fragments)
    result = %{input | fragments: fragments}

    if error_count > 0 do
      {:jump, result, abort_phase}
    else
      {:ok, result}
    end
  end

  # Check a list of fragments for cycles
  @spec check([Blueprint.Document.Fragment.Named.t()]) ::
          {[Blueprint.Document.Fragment.Named.t()], integer}
  defp check(fragments) do
    graph = :digraph.new([:cyclic])

    try do
      # Only when no errors were found: reorder the fragments by reverse
      # topological sort so dependencies come before their dependents. If
      # `check/2` returned a non-zero error count, the `with` falls through
      # and returns that `{fragments, error_count}` tuple unchanged.
      with {fragments, 0} <- check(fragments, graph) do
        fragments = Map.new(fragments, &{&1.name, &1})

        fragments =
          graph
          |> :digraph_utils.topsort()
          |> Enum.reverse()
          |> Enum.map(&Map.fetch!(fragments, &1))

        {fragments, 0}
      end
    after
      # :digraph is ETS-backed; always delete it to avoid leaking tables.
      :digraph.delete(graph)
    end
  end

  @spec check([Blueprint.Document.Fragment.Named.t()], :digraph.graph()) ::
          {[Blueprint.Document.Fragment.Named.t()], integer}
  defp check(fragments, graph) do
    # First pass: build the spread graph (vertices and edges).
    Enum.each(fragments, fn node -> Blueprint.prewalk(node, &vertex(&1, graph)) end)

    # Second pass: attach a cycle error to every fragment on a cycle,
    # counting the errors added.
    {modified, error_count} =
      Enum.reduce(fragments, {[], 0}, fn fragment, {processed, error_count} ->
        errors_to_add = cycle_errors(fragment, :digraph.get_cycle(graph, fragment.name))
        fragment_with_errors = update_in(fragment.errors, &(errors_to_add ++ &1))
        {[fragment_with_errors | processed], error_count + length(errors_to_add)}
      end)

    {modified, error_count}
  end

  # Add a vertex modeling a fragment
  @spec vertex(Blueprint.Document.Fragment.Named.t(), :digraph.graph()) ::
          Blueprint.Document.Fragment.Named.t()
  defp vertex(%Blueprint.Document.Fragment.Named{} = fragment, graph) do
    :digraph.add_vertex(graph, fragment.name)

    # Walk the fragment body; every spread found becomes an edge
    # fragment -> spread target.
    Blueprint.prewalk(fragment, fn
      %Blueprint.Document.Fragment.Spread{} = spread ->
        edge(fragment, spread, graph)
        spread

      node ->
        node
    end)

    fragment
  end

  defp vertex(fragment, _graph) do
    fragment
  end

  # Add an edge, modeling the relationship between two fragments
  @spec edge(
          Blueprint.Document.Fragment.Named.t(),
          Blueprint.Document.Fragment.Spread.t(),
          :digraph.graph()
        ) :: true
  defp edge(fragment, spread, graph) do
    # The spread target may not have been visited yet, so make sure its
    # vertex exists before adding the edge.
    :digraph.add_vertex(graph, spread.name)
    :digraph.add_edge(graph, fragment.name, spread.name)
    true
  end

  # Generate an error for a cyclic reference
  @spec cycle_errors(Blueprint.Document.Fragment.Named.t(), false | [String.t()]) :: [
          Phase.Error.t()
        ]
  # `:digraph.get_cycle/2` returns `false` when the vertex is not on a cycle.
  defp cycle_errors(_, false) do
    []
  end

  defp cycle_errors(fragment, cycles) do
    [cycle_error(fragment, error_message(fragment.name, cycles))]
  end

  @doc """
  Generate the error message.
  """
  @spec error_message(String.t(), [String.t()]) :: String.t()
  # A single-element cycle is a self-spread.
  def error_message(fragment_name, [fragment_name]) do
    ~s(Cannot spread fragment "#{fragment_name}" within itself.)
  end

  def error_message(fragment_name, [_fragment_name | cycles]) do
    deps = Enum.map(cycles, &~s("#{&1}")) |> Enum.join(", ")
    ~s(Cannot spread fragment "#{fragment_name}" within itself via #{deps}.)
  end

  # Generate the error for a fragment cycle
  @spec cycle_error(Blueprint.Document.Fragment.Named.t(), String.t()) :: Phase.Error.t()
  defp cycle_error(fragment, message) do
    %Phase.Error{
      message: message,
      phase: __MODULE__,
      locations: [
        %{line: fragment.source_location.line, column: fragment.source_location.column}
      ]
    }
  end
end
defmodule Absinthe.Phase.Document.Validation.UniqueInputFieldNames do
  @moduledoc false

  # Validates a document to ensure that all input fields have unique names.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node/1)}
  end

  # Find input objects and check each of their fields.
  @spec handle_node(Blueprint.node_t()) :: Blueprint.node_t()
  defp handle_node(%{normalized: %Blueprint.Input.Object{} = object} = parent) do
    checked_fields = Enum.map(object.fields, &process(&1, object.fields))
    %{parent | normalized: %{object | fields: checked_fields}}
  end

  defp handle_node(node), do: node

  # Check an input field against its siblings, flagging duplicates.
  @spec process(Blueprint.Input.Field.t(), [Blueprint.Input.Field.t()]) ::
          Blueprint.Input.Field.t()
  defp process(field, fields) do
    same_named = Enum.filter(fields, &(&1.name == field.name))
    check_duplicates(field, same_named)
  end

  # Add flags and errors when the name occurs more than once.
  @spec check_duplicates(Blueprint.Input.Field.t(), [Blueprint.Input.Field.t()]) ::
          Blueprint.Input.Field.t()
  defp check_duplicates(field, [_single]) do
    field
  end

  defp check_duplicates(field, _multiple) do
    field
    |> flag_invalid(:duplicate_name)
    |> put_error(error(field))
  end

  # Build the error for an input field.
  @spec error(Blueprint.Input.Field.t()) :: Phase.Error.t()
  defp error(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(),
      locations: [node.source_location]
    }
  end

  @doc """
  Generate the error message.
  """
  @spec error_message :: String.t()
  def error_message do
    "Duplicate input field name."
  end
end
defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectType do
  @moduledoc false

  # Validates document to ensure that all fields are provided on the correct type.

  alias Absinthe.{Blueprint, Phase, Schema, Type}

  use Absinthe.Phase

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    result = Blueprint.prewalk(input, &handle_node(&1, input))
    {:ok, result}
  end

  @spec handle_node(Blueprint.node_t(), Schema.t()) :: Blueprint.node_t()

  # The schema defines no root type for this operation kind (eg, a
  # subscription against a schema without a subscription root).
  defp handle_node(%Blueprint.Document.Operation{schema_node: nil} = node, _) do
    error = %Phase.Error{
      phase: __MODULE__,
      message: "Operation \"#{node.type}\" not supported",
      locations: [node.source_location]
    }

    node
    |> flag_invalid(:unknown_operation)
    |> put_error(error)
  end

  # Any node with selections and a known schema node:
  # - unknown fields get an error with type/field suggestions;
  # - fragment spreads and inline fragments must share at least one possible
  #   concrete type with the parent.
  defp handle_node(
         %{selections: selections, schema_node: parent_schema_node} = node,
         %{schema: schema} = input
       )
       when not is_nil(parent_schema_node) do
    possible_parent_types = possible_types(parent_schema_node, schema)

    selections =
      Enum.map(selections, fn
        %Blueprint.Document.Field{schema_node: nil} = field ->
          type = named_type(parent_schema_node, schema)

          field
          |> flag_invalid(:unknown_field)
          |> put_error(
            error(
              field,
              type.name,
              suggested_type_names(field.name, type, input),
              suggested_field_names(field.name, type, input)
            )
          )

        %Blueprint.Document.Fragment.Spread{errors: []} = spread ->
          fragment = Enum.find(input.fragments, &(&1.name == spread.name))
          possible_child_types = possible_types(fragment.schema_node, schema)

          if Enum.any?(possible_child_types, &(&1 in possible_parent_types)) do
            spread
          else
            spread_error(spread, possible_parent_types, possible_child_types, schema)
          end

        %Blueprint.Document.Fragment.Inline{} = fragment ->
          possible_child_types = possible_types(fragment.schema_node, schema)

          if Enum.any?(possible_child_types, &(&1 in possible_parent_types)) do
            fragment
          else
            spread_error(fragment, possible_parent_types, possible_child_types, schema)
          end

        other ->
          other
      end)

    %{node | selections: selections}
  end

  defp handle_node(node, _) do
    node
  end

  # Resolve type identifiers to their printable names.
  defp idents_to_names(idents, schema) do
    for ident <- idents do
      Absinthe.Schema.lookup_type(schema, ident).name
    end
  end

  # Flag a fragment/spread whose possible types don't overlap the parent's.
  defp spread_error(spread, parent_types_idents, child_types_idents, schema) do
    parent_types = idents_to_names(parent_types_idents, schema)
    child_types = idents_to_names(child_types_idents, schema)

    msg = """
    Fragment spread has no type overlap with parent.
    Parent possible types: #{inspect(parent_types)}
    Spread possible types: #{inspect(child_types)}
    """

    error = %Phase.Error{
      phase: __MODULE__,
      message: msg,
      locations: [spread.source_location]
    }

    spread
    |> flag_invalid(:invalid_spread)
    |> put_error(error)
  end

  # The concrete object type identifiers a type condition can match:
  # the object itself, an interface's implementors, or a union's members.
  defp possible_types(%{type: type}, schema) do
    possible_types(type, schema)
  end

  defp possible_types(type, schema) do
    schema
    |> Absinthe.Schema.lookup_type(type)
    |> case do
      %Type.Object{identifier: identifier} ->
        [identifier]

      %Type.Interface{identifier: identifier} ->
        schema.__absinthe_interface_implementors__
        |> Map.fetch!(identifier)

      %Type.Union{types: types} ->
        types

      _ ->
        []
    end
  end

  # The named (unwrapped) type for a schema node; fields resolve via their
  # declared type, other schema nodes already carry a name.
  @spec named_type(Type.t(), Schema.t()) :: Type.named_t()
  defp named_type(%Type.Field{} = node, schema) do
    Schema.lookup_type(schema, node.type)
  end

  defp named_type(%{name: _} = node, _) do
    node
  end

  # Generate the error for a field
  @spec error(Blueprint.node_t(), String.t(), [String.t()], [String.t()]) :: Phase.Error.t()
  defp error(field_node, parent_type_name, type_suggestions, field_suggestions) do
    %Phase.Error{
      phase: __MODULE__,
      message:
        error_message(field_node.name, parent_type_name, type_suggestions, field_suggestions),
      locations: [field_node.source_location]
    }
  end

  # Maximum number of suggestions rendered in an error message.
  @suggest 5

  @doc """
  Generate an error for a field
  """
  @spec error_message(String.t(), String.t(), [String.t()], [String.t()]) :: String.t()
  def error_message(field_name, type_name, type_suggestions \\ [], field_suggestions \\ [])

  def error_message(field_name, type_name, [], []) do
    ~s(Cannot query field "#{field_name}" on type "#{type_name}".)
  end

  def error_message(field_name, type_name, [], field_suggestions) do
    error_message(field_name, type_name) <>
      " Did you mean " <> to_quoted_or_list(field_suggestions |> Enum.take(@suggest)) <> "?"
  end

  def error_message(field_name, type_name, type_suggestions, []) do
    error_message(field_name, type_name) <>
      " Did you mean to use an inline fragment on " <>
      to_quoted_or_list(type_suggestions |> Enum.take(@suggest)) <> "?"
  end

  # When both kinds of suggestions exist, prefer the type suggestions.
  def error_message(field_name, type_name, type_suggestions, _) do
    error_message(field_name, type_name, type_suggestions)
  end

  # Names of concrete types/interfaces that *do* declare the field, for
  # "did you mean to use an inline fragment on ..." hints.
  defp suggested_type_names(external_field_name, type, blueprint) do
    internal_field_name = blueprint.adapter.to_internal_name(external_field_name, :field)
    possible_types = find_possible_types(internal_field_name, type, blueprint.schema)

    possible_interfaces =
      find_possible_interfaces(internal_field_name, possible_types, blueprint.schema)

    possible_interfaces
    |> Enum.map(& &1.name)
    |> Enum.concat(Enum.map(possible_types, & &1.name))
    |> Enum.sort()
  end

  # Similarly-spelled field names on the same type, for "did you mean" hints.
  defp suggested_field_names(external_field_name, %{fields: _} = type, blueprint) do
    internal_field_name = blueprint.adapter.to_internal_name(external_field_name, :field)

    Map.values(type.fields)
    |> Enum.map(& &1.name)
    |> Absinthe.Utils.Suggestion.sort_list(internal_field_name)
    |> Enum.map(&blueprint.adapter.to_external_name(&1, :field))
    |> Enum.sort()
  end

  # Types without fields (eg, scalars) yield no field suggestions.
  defp suggested_field_names(_, _, _) do
    []
  end

  defp find_possible_interfaces(field_name, possible_types, schema) do
    possible_types
    |> types_to_interface_idents
    |> Enum.uniq()
    |> sort_by_implementation_count(possible_types)
    |> Enum.map(&Schema.lookup_type(schema, &1))
    |> types_with_field(field_name)
  end

  # Order interfaces with the most implementors first.
  defp sort_by_implementation_count(iface_idents, types) do
    Enum.sort_by(iface_idents, fn iface ->
      count =
        Enum.count(types, fn
          %{interfaces: ifaces} ->
            Enum.member?(ifaces, iface)

          _ ->
            false
        end)

      count
    end)
    |> Enum.reverse()
  end

  defp types_to_interface_idents(types) do
    Enum.flat_map(types, fn
      %{interfaces: ifaces} ->
        ifaces

      _ ->
        []
    end)
  end

  defp find_possible_types(field_name, type, schema) do
    schema
    |> Schema.concrete_types(Type.unwrap(type))
    |> types_with_field(field_name)
  end

  defp types_with_field(types, field_name) do
    Enum.filter(types, &type_with_field?(&1, field_name))
  end

  # Truthy when the type declares a field with the given internal name.
  defp type_with_field?(%{fields: fields}, field_name) do
    Map.values(fields)
    |> Enum.find(&(&1.name == field_name))
  end

  defp type_with_field?(_, _) do
    false
  end

  # Render suggestions as `"a"`, `"a" or "b"`, or `"a", "b", or "c"`.
  defp to_quoted_or_list([a]), do: ~s("#{a}")
  defp to_quoted_or_list([a, b]), do: ~s("#{a}" or "#{b}")
  defp to_quoted_or_list(other), do: to_longer_quoted_or_list(other)

  defp to_longer_quoted_or_list(list, acc \\ "")
  defp to_longer_quoted_or_list([word], acc), do: acc <> ~s(, or "#{word}")

  defp to_longer_quoted_or_list([word | rest], "") do
    rest
    |> to_longer_quoted_or_list(~s("#{word}"))
  end

  defp to_longer_quoted_or_list([word | rest], acc) do
    rest
    |> to_longer_quoted_or_list(acc <> ~s(, "#{word}"))
  end
end
defmodule Absinthe.Phase.Document.Validation.VariablesAreInputTypes do
  @moduledoc false

  # Validates a document to ensure that all variable definitions are for
  # input types.

  alias Absinthe.{Blueprint, Phase, Schema, Type}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node(&1, input.schema))}
  end

  # Examine variable definitions; definitions without a schema node are
  # reported by other phases.
  @spec handle_node(Blueprint.node_t(), Schema.t()) :: Blueprint.node_t()
  defp handle_node(%Blueprint.Document.VariableDefinition{schema_node: nil} = node, _) do
    node
  end

  defp handle_node(%Blueprint.Document.VariableDefinition{} = node, schema) do
    input_type? =
      schema
      |> Schema.lookup_type(node.schema_node)
      |> Type.unwrap()
      |> Type.input_type?()

    case input_type? do
      true ->
        node

      false ->
        node
        |> flag_invalid(:non_input_type)
        |> put_error(error(node, Type.name(node.schema_node)))
    end
  end

  defp handle_node(node, _), do: node

  # Build the error for a variable definition.
  @spec error(Blueprint.Document.VariableDefinition.t(), String.t()) :: Phase.Error.t()
  defp error(node, type_rep) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node.name, type_rep),
      locations: [node.source_location]
    }
  end

  @doc """
  Generate the error message.
  """
  @spec error_message(String.t(), String.t()) :: String.t()
  def error_message(variable_name, type_rep) do
    ~s(Variable "#{variable_name}" cannot be non-input type "#{type_rep}".)
  end
end
defmodule Absinthe.Phase.Document.Validation.Result do
  @moduledoc false

  # Collects validation errors into the result.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, options \\ []) do
    do_run(input, Map.new(options))
  end

  # Harvest every node's errors into `execution.validation_errors`. When
  # errors exist: jump to the configured result phase if `jump_phases` is
  # set, otherwise return `{:error, result}` and let the pipeline decide.
  @spec do_run(Blueprint.t(), %{result_phase: Phase.t(), jump_phases: boolean}) ::
          Phase.result_t()
  def do_run(input, %{result_phase: abort_phase, jump_phases: jump}) do
    {input, errors} = Blueprint.prewalk(input, [], &handle_node/2)
    # The accumulator was built by prepending; restore document order.
    errors = :lists.reverse(errors)
    result = put_in(input.execution.validation_errors, errors)

    case {errors, jump} do
      {[], _} ->
        {:ok, result}

      {_, false} ->
        {:error, result}

      _ ->
        {:jump, result, abort_phase}
    end
  end

  # Collect the validation errors from nodes
  @spec handle_node(Blueprint.node_t(), [Phase.Error.t()]) ::
          {Blueprint.node_t(), [Phase.Error.t()]}
  defp handle_node(%{errors: errs} = node, errors) do
    {node, :lists.reverse(errs) ++ errors}
  end

  # Nodes carrying a `raw` sub-tree are walked for errors as well.
  defp handle_node(%{raw: raw} = node, errors) do
    {_, errors} = Blueprint.prewalk(raw, errors, &handle_node/2)
    {node, errors}
  end

  defp handle_node(node, acc), do: {node, acc}
end
defmodule Absinthe.Phase.Document.Validation.SelectedCurrentOperation do
  @moduledoc false

  # Validates that an operation name was provided when needed: a document
  # containing more than one operation can only be executed when one of them
  # has been selected as current.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    result =
      # Matching `[_, _ | _]` checks "more than one operation" without
      # traversing the whole list the way `length/1` would.
      case {Blueprint.current_operation(input), input.operations} do
        {nil, [_, _ | _]} ->
          input
          |> flag_invalid(:no_current_operation)
          |> put_error(error())

        _ ->
          input
      end

    {:ok, result}
  end

  # Build the error for the blueprint.
  @spec error :: Phase.Error.t()
  defp error do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message()
    }
  end

  @doc """
  Generate the error message.
  """
  def error_message do
    ~s(Must provide a valid operation name if query contains multiple operations.)
  end
end
defmodule Absinthe.Phase.Document.Validation.UniqueArgumentNames do
  @moduledoc false

  # Validates a document to ensure that all arguments for a field or
  # directive have unique names.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node/1)}
  end

  @argument_hosts [
    Blueprint.Document.Field,
    Blueprint.Directive
  ]

  # Fields and directives are the only nodes that carry arguments.
  @spec handle_node(Blueprint.node_t()) :: Blueprint.node_t()
  defp handle_node(%argument_host{} = node) when argument_host in @argument_hosts do
    checked = Enum.map(node.arguments, &process(&1, node.arguments))
    %{node | arguments: checked}
  end

  defp handle_node(node), do: node

  # Check an argument against its siblings, flagging duplicates.
  @spec process(Blueprint.Input.Argument.t(), [Blueprint.Input.Argument.t()]) ::
          Blueprint.Input.Argument.t()
  defp process(argument, arguments) do
    same_named = Enum.filter(arguments, &(&1.name == argument.name))
    check_duplicates(argument, same_named)
  end

  # Add flags and errors when the name occurs more than once.
  @spec check_duplicates(Blueprint.Input.Argument.t(), [Blueprint.Input.Argument.t()]) ::
          Blueprint.Input.Argument.t()
  defp check_duplicates(argument, [_single]) do
    argument
  end

  defp check_duplicates(argument, _multiple) do
    argument
    |> flag_invalid(:duplicate_name)
    |> put_error(error(argument))
  end

  # Build the error for a duplicate argument.
  @spec error(Blueprint.Input.Argument.t()) :: Phase.Error.t()
  defp error(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(),
      locations: [node.source_location]
    }
  end

  @doc """
  Generate the error message.
  """
  @spec error_message :: String.t()
  def error_message do
    "Duplicate argument name."
  end
end
defmodule Absinthe.Phase.Document.Validation.KnownArgumentNames do
  @moduledoc false
  # Validates document to ensure that all arguments are in the schema.
  #
  # Note: while graphql-js doesn't add errors to unknown arguments that
  # are provided to unknown fields, Absinthe does -- but when the errors
  # are harvested from the Blueprint tree, only the first layer of unknown
  # errors (eg, the field) should be presented to the user.
  alias Absinthe.{Blueprint, Phase, Schema, Type}
  use Absinthe.Phase
  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    result = Blueprint.prewalk(input, &handle_node(&1, input.schema))
    {:ok, result}
  end
  # Nodes never matched to a schema node cannot have their arguments
  # validated; leave them untouched.
  @spec handle_node(Blueprint.node_t(), Schema.t()) :: Blueprint.node_t()
  defp handle_node(%{schema_node: nil} = node, _schema) do
    node
  end
  # For any node with selections: flag each child field's arguments that
  # lack a schema node, attaching a field-flavored error mentioning the
  # parent type's name.
  defp handle_node(%{selections: _, schema_node: schema_node} = node, schema) do
    selections =
      Enum.map(node.selections, fn
        %{arguments: arguments} = field ->
          arguments =
            Enum.map(arguments, fn
              %{schema_node: nil} = arg ->
                arg
                |> flag_invalid(:no_schema_node)
                |> put_error(field_error(arg, field, type_name(schema_node, schema)))
              other ->
                other
            end)
          %{field | arguments: arguments}
        other ->
          other
      end)
    %{node | selections: selections}
  end
  # Directives are checked the same way, but with a directive-flavored error.
  defp handle_node(%Blueprint.Directive{} = node, _) do
    arguments =
      Enum.map(node.arguments, fn
        %{schema_node: nil} = arg ->
          arg
          |> flag_invalid(:no_schema_node)
          |> put_error(directive_error(arg, node))
        other ->
          other
      end)
    %{node | arguments: arguments}
  end
  defp handle_node(node, _) do
    node
  end
  # For a schema field, unwrap its type (strip list/non-null wrappers) and
  # look the result up in the schema to obtain the concrete type name; for
  # any other schema node, the node's own name is used.
  @spec type_name(Type.t(), Schema.t()) :: String.t()
  defp type_name(%Type.Field{} = node, schema) do
    node.type
    |> Type.unwrap()
    |> schema.__absinthe_lookup__()
    |> Map.fetch!(:name)
  end
  defp type_name(node, _) do
    node.name
  end
  # Generate the error for a directive argument
  @spec directive_error(Blueprint.node_t(), Blueprint.node_t()) :: Phase.Error.t()
  defp directive_error(argument_node, directive_node) do
    %Phase.Error{
      phase: __MODULE__,
      message: directive_error_message(argument_node.name, directive_node.name),
      locations: [argument_node.source_location]
    }
  end
  # Generate the error for a field argument
  @spec field_error(Blueprint.node_t(), Blueprint.node_t(), String.t()) :: Phase.Error.t()
  defp field_error(argument_node, field_node, type_name) do
    %Phase.Error{
      phase: __MODULE__,
      message: field_error_message(argument_node.name, field_node.name, type_name),
      locations: [argument_node.source_location]
    }
  end
  @doc """
  Generate an error for a directive argument
  """
  @spec directive_error_message(String.t(), String.t()) :: String.t()
  def directive_error_message(argument_name, directive_name) do
    ~s(Unknown argument "#{argument_name}" on directive "@#{directive_name}".)
  end
  @doc """
  Generate an error for a field argument
  """
  @spec field_error_message(String.t(), String.t(), String.t()) :: String.t()
  def field_error_message(argument_name, field_name, type_name) do
    ~s(Unknown argument "#{argument_name}" on field "#{field_name}" of type "#{type_name}".)
  end
end
defmodule Absinthe.Phase.Document.Validation.NoUndefinedVariables do
  @moduledoc false
  # Validates document to ensure that the only variables that are used in a
  # document are defined on the operation.
  alias Absinthe.{Blueprint, Phase}
  use Absinthe.Phase
  use Absinthe.Phase.Validation
  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    result = Blueprint.prewalk(input, &handle_node(&1, input.operations))
    {:ok, result}
  end
  # For each variable reference in the document, inspect every operation
  # that uses a variable of the same name; each such operation lacking a
  # matching variable definition contributes one error per use.
  def handle_node(%Blueprint.Input.Variable{} = node, operations) do
    errors =
      for op <- operations do
        for var <- op.variable_uses, var.name == node.name do
          if Enum.find(op.variable_definitions, &(&1.name == var.name)) do
            []
          else
            [error(node, op)]
          end
        end
      end
      |> List.flatten()
    node = %{node | errors: errors ++ node.errors}
    # Only flag the node invalid when at least one error was found.
    case errors do
      [] ->
        node
      _ ->
        flag_invalid(node, :no_definition)
    end
  end
  def handle_node(node, _) do
    node
  end
  # Generate the error for the node; it points at both the variable use and
  # the operation missing the definition.
  @spec error(Blueprint.Input.Variable.t(), Blueprint.Document.Operation.t()) :: Phase.Error.t()
  defp error(node, operation) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node.name, operation.name),
      locations: [node.source_location, operation.source_location]
    }
  end
  @doc """
  Generate an error message for an undefined variable.
  """
  @spec error_message(String.t(), nil | String.t()) :: String.t()
  def error_message(name, nil) do
    ~s(Variable "#{name}" is not defined.)
  end
  def error_message(name, operation_name) do
    ~s(Variable "#{name}" is not defined by operation "#{operation_name}".)
  end
end
defmodule Absinthe.Phase.Document.Validation.LoneAnonymousOperation do
  @moduledoc false

  # Ensures an unnamed operation only appears when it is the sole operation
  # defined by the document.

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node/1)}
  end

  # Only the blueprint root carries the full operation list.
  @spec handle_node(Blueprint.node_t()) :: Blueprint.node_t()
  defp handle_node(%Blueprint{} = node) do
    %{node | operations: process(node.operations)}
  end

  defp handle_node(node), do: node

  @spec process([Blueprint.Document.Operation.t()]) :: [Blueprint.Document.Operation.t()]
  defp process(operations) do
    do_process(operations, length(operations))
  end

  # With fewer than two operations, an anonymous operation is always legal.
  @spec do_process([Blueprint.Document.Operation.t()], integer) :: [
          Blueprint.Document.Operation.t()
        ]
  defp do_process(operations, count) when count < 2 do
    operations
  end

  # Multiple operations: every unnamed one is flagged with an error.
  defp do_process(operations, _) do
    Enum.map(operations, fn
      %{name: nil} = operation ->
        operation
        |> flag_invalid(:bad_name)
        |> put_error(error(operation))

      operation ->
        operation
    end)
  end

  # Build the phase error attached to an offending anonymous operation.
  @spec error(Blueprint.node_t()) :: Phase.Error.t()
  defp error(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: "This anonymous operation must be the only defined operation.",
      locations: [node.source_location]
    }
  end
end
defmodule Absinthe.Phase.Document.Validation.NoUnusedVariables do
  @moduledoc false
  # Validates document to ensure that the only variables that are used in a
  # document are defined on the operation.
  alias Absinthe.{Blueprint, Phase}
  use Absinthe.Phase
  use Absinthe.Phase.Validation
  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    result = Blueprint.prewalk(input, &handle_node(&1, input.operations))
    {:ok, result}
  end
  # For each variable definition, inspect every operation defining a
  # variable of the same name; each such operation with no matching use
  # contributes one error.
  def handle_node(%Blueprint.Document.VariableDefinition{} = node, operations) do
    errors =
      for op <- operations do
        for var <- op.variable_definitions, var.name == node.name do
          if Enum.find(op.variable_uses, &(&1.name == var.name)) do
            []
          else
            [error(node, op)]
          end
        end
      end
      |> List.flatten()
    # Only flag the definition invalid when at least one error was found.
    case errors do
      [] ->
        node
      errors ->
        %{node | errors: errors ++ node.errors}
        |> flag_invalid(:unused)
    end
  end
  def handle_node(node, _) do
    node
  end
  # Generate the error for the node; locations are deduplicated because the
  # definition and its operation may share a source location.
  @spec error(Blueprint.Document.VariableDefinition.t(), Blueprint.Document.Operation.t()) ::
          Phase.Error.t()
  defp error(node, operation) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node.name, operation.name),
      locations: Enum.uniq([node.source_location, operation.source_location])
    }
  end
  @doc """
  Generate an error message for an unused variable.
  """
  @spec error_message(String.t(), nil | String.t()) :: String.t()
  def error_message(name, nil) do
    ~s(Variable "#{name}" is never used.)
  end
  def error_message(name, operation_name) do
    ~s(Variable "#{name}" is never used in operation "#{operation_name}".)
  end
end
# [sic]
defmodule Absinthe.Phase.Document.Validation.ScalarLeafs do
  @moduledoc false
  # Validates that all leaf nodes are scalars.
  #
  # # Examples:
  # Assume `user` field is an object, and `email` is a scalar.
  #
  # ## DO NOT
  # ```
  # {
  #   user
  # }
  # ```
  #
  # ## DO
  # ```
  # {
  #   user {name email}
  # }
  # ```
  #
  # ## DO NOT
  # ```
  # {
  #   email { fields on scalar }
  # }
  # ```
  #
  # ## DO
  # ```
  # {
  #   email
  # }
  # ```
  alias Absinthe.{Blueprint, Phase, Type}
  use Absinthe.Phase
  use Absinthe.Phase.Validation
  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    result = Blueprint.prewalk(input, &handle_node(&1, input.schema))
    {:ok, result}
  end
  # A node with no schema node cannot be checked; halt the walk beneath it.
  defp handle_node(%{schema_node: nil} = node, _schema), do: {:halt, node}
  # Expand the field's type in the schema, then compare the unwrapped
  # (list/non-null stripped) type against the field's selections.
  defp handle_node(%Blueprint.Document.Field{schema_node: schema_node} = node, schema) do
    type = Type.expand(schema_node.type, schema)
    process(node, Type.unwrap(type), type)
  end
  defp handle_node(node, _) do
    node
  end
  # Composite types that require a subselection.
  @has_subfields [
    Type.Object,
    Type.Union,
    Type.Interface
  ]
  # Composite type with no selections: a subselection is required.
  defp process(%{selections: []} = node, %unwrapped{}, type) when unwrapped in @has_subfields do
    bad_node(node, type, :missing_subfields)
  end
  # Leaf type with selections: a subselection is forbidden.
  defp process(%{selections: s} = node, %unwrapped{}, type)
       when s != [] and not (unwrapped in @has_subfields) do
    bad_node(node, type, :bad_subfields)
  end
  defp process(node, _, _) do
    node
  end
  defp bad_node(node, type, :bad_subfields = flag) do
    node
    |> flag_invalid(flag)
    |> put_error(error(node, no_subselection_allowed_message(node.name, Type.name(type))))
  end
  defp bad_node(node, type, :missing_subfields = flag) do
    node
    |> flag_invalid(flag)
    |> put_error(error(node, required_subselection_message(node.name, Type.name(type))))
  end
  # Generate the error
  @spec error(Blueprint.Document.Field.t(), String.t()) :: Phase.Error.t()
  defp error(node, message) do
    %Phase.Error{
      phase: __MODULE__,
      message: message,
      locations: [node.source_location]
    }
  end
  @doc """
  Generate the error message for an extraneous field subselection.
  """
  @spec no_subselection_allowed_message(String.t(), String.t()) :: String.t()
  def no_subselection_allowed_message(field_name, type_name) do
    ~s(Field "#{field_name}" must not have a selection since type "#{type_name}" has no subfields.)
  end
  @doc """
  Generate the error message for a missing field subselection.
  """
  @spec required_subselection_message(String.t(), String.t()) :: String.t()
  def required_subselection_message(field_name, type_name) do
    ~s(Field "#{field_name}" of type "#{type_name}" must have a selection of subfields. Did you mean "#{
      field_name
    } { ... }"?)
  end
end
defmodule Absinthe.Phase.Document.Validation.ProvidedNonNullArguments do
  @moduledoc false

  # Ensures that every non-null argument in the document was given a value.

  alias Absinthe.{Blueprint, Phase, Schema, Type}

  use Absinthe.Phase

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node(&1, input.schema))}
  end

  # An argument flagged as missing (with no value) gets an error attached and
  # the walk beneath it halted; every other node passes through untouched.
  @spec handle_node(Blueprint.node_t(), Schema.t()) :: Blueprint.node_t()
  defp handle_node(%Blueprint.Input.Argument{value: nil, flags: %{missing: _}} = node, schema) do
    {:halt, put_error(node, error(node, node.schema_node.type, schema))}
  end

  defp handle_node(node, _) do
    node
  end

  # Build the phase error for a missing argument value.
  @spec error(Blueprint.node_t(), Type.t(), Schema.t()) :: Phase.Error.t()
  defp error(node, type, schema) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node.name, Type.name(type, schema)),
      locations: [node.source_location]
    }
  end

  @doc """
  Generate the argument error.
  """
  @spec error_message(String.t(), String.t()) :: String.t()
  def error_message(name, type_name) do
    ~s(In argument "#{name}": Expected type "#{type_name}", found null.)
  end
end
defmodule Absinthe.Phase.Document.Validation.ProvidedNonNullVariables do
  @moduledoc false

  # Ensures every variable definition with a non-null type was actually
  # given a (non-null) value.

  alias Absinthe.{Blueprint, Phase, Schema}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.prewalk(input, &handle_node(&1, input.schema))}
  end

  # A non-null variable definition is in error when its provided value is
  # absent (`nil`) or an explicit GraphQL null literal.
  @spec handle_node(Blueprint.node_t(), Schema.t()) :: Blueprint.node_t()
  defp handle_node(
         %Blueprint.Document.VariableDefinition{type: %Blueprint.TypeReference.NonNull{}} = node,
         _
       ) do
    case node.provided_value do
      nil -> put_error(node, error(node))
      %Blueprint.Input.Null{} -> put_error(node, error(node))
      _ -> node
    end
  end

  defp handle_node(node, _) do
    node
  end

  # Build the phase error for a null-valued non-null variable definition.
  @spec error(Blueprint.Document.VariableDefinition.t()) :: Phase.Error.t()
  defp error(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: error_message(node.name),
      locations: [node.source_location]
    }
  end

  @doc """
  Generate the error message.
  """
  @spec error_message(String.t()) :: String.t()
  def error_message(variable_name) do
    ~s(Variable "#{variable_name}": Expected non-null, found null.)
  end
end
defmodule Absinthe.Phase.Subscription.SubscribeSelf do
  use Absinthe.Phase
  alias Absinthe.Phase

  @moduledoc false

  alias Absinthe.Blueprint

  # If the current operation is a subscription, register it with the
  # configured pubsub and replace the rest of the pipeline with the
  # subscription result phase; any other operation passes through.
  @spec run(any, Keyword.t()) :: {:ok, Blueprint.t()}
  def run(blueprint, options) do
    with %{type: :subscription} = op <- Blueprint.current_operation(blueprint) do
      do_subscription(op, blueprint, options)
    else
      _ -> {:ok, blueprint}
    end
  end

  def do_subscription(%{type: :subscription} = op, blueprint, options) do
    context = blueprint.execution.context
    pubsub = ensure_pubsub!(context)

    # The document id is derived from a hash of the whole blueprint.
    hash = :erlang.phash2(blueprint)
    doc_id = "__absinthe__:doc:#{hash}"

    # A subscription operation has exactly one root field.
    %{selections: [field]} = op

    with {:ok, field_key} <- get_field_key(field, context) do
      Absinthe.Subscription.subscribe(pubsub, field_key, doc_id, blueprint)
      {:replace, blueprint, [{Phase.Subscription.Result, topic: doc_id}]}
    else
      {:error, error} ->
        blueprint = update_in(blueprint.execution.validation_errors, &[error | &1])

        error_pipeline = [
          {Phase.Document.Result, options}
        ]

        {:replace, blueprint, error_pipeline}
    end
  end

  # Determine the subscription topic for the root field by invoking the
  # field's `:config` function, defaulting the topic to the field name when
  # none is defined. Returns `{:ok, {name, key}}` or `{:error, %Phase.Error{}}`.
  defp get_field_key(%{schema_node: schema_node, argument_data: argument_data} = field, context) do
    name = schema_node.identifier

    config =
      case Absinthe.Type.function(schema_node, :config) do
        fun when is_function(fun, 2) ->
          apply(fun, [argument_data, %{context: context}])

        fun when is_function(fun, 1) ->
          IO.write(
            :stderr,
            "Warning: 1-arity topic functions are deprecated, upgrade to 2 arity before 1.4.0 release"
          )

          apply(fun, [argument_data])

        nil ->
          {:ok, topic: Atom.to_string(name)}
      end

    case config do
      {:ok, config} ->
        key = find_key!(config)
        {:ok, {name, key}}

      {:error, msg} ->
        error = %Phase.Error{
          phase: __MODULE__,
          message: msg,
          locations: [field.source_location]
        }

        {:error, error}

      val ->
        # Fixed typo in the raise message ("returne" -> "return").
        raise """
        Invalid return from config function!
        Config function must return `{:ok, config}` or `{:error, msg}`. You returned:
        #{inspect(val)}
        """
    end
  end

  # Fetch the mandatory `:topic` from the config, raising when absent or nil.
  defp find_key!(config) do
    topic =
      config[:topic] ||
        raise """
        Subscription config must include a non null topic!
        #{inspect(config)}
        """

    to_string(topic)
  end

  # Subscriptions require a pubsub module in the execution context; raise a
  # descriptive error otherwise.
  defp ensure_pubsub!(context) do
    case Absinthe.Subscription.extract_pubsub(context) do
      {:ok, pubsub} ->
        pubsub

      _ ->
        raise """
        Pubsub not configured!
        Subscriptions require a configured pubsub module.
        """
    end
  end
end
defmodule Absinthe.Phase.Subscription.Result do
  @moduledoc false

  # Runs in place of resolution and the normal result phase once a
  # subscription has been registered, reporting the subscribed topic.

  alias Absinthe.Blueprint

  @spec run(any, Keyword.t()) :: {:ok, Blueprint.t()}
  def run(blueprint, topic: topic) do
    {:ok, %{blueprint | result: %{"subscribed" => topic}}}
  end
end
defmodule Absinthe.Phase.Parse do
  @moduledoc false

  # Tokenizes and parses the blueprint's GraphQL source input into a
  # `Absinthe.Language.Document`, storing it back on the blueprint's
  # `:input`. Parse failures are recorded as validation errors.

  use Absinthe.Phase

  alias Absinthe.{Language, Phase}

  @spec run(Language.Source.t(), Keyword.t()) :: Phase.result_t()
  def run(input, options \\ [])

  def run(%Absinthe.Blueprint{} = blueprint, options) do
    options = Map.new(options)

    case parse(blueprint.input) do
      {:ok, value} ->
        {:ok, %{blueprint | input: value}}

      {:error, error} ->
        blueprint
        |> add_validation_error(error)
        |> handle_error(options)
    end
  end

  # Bare input (binary or %Language.Source{}) is wrapped in a blueprint first.
  def run(input, options) do
    run(%Absinthe.Blueprint{input: input}, options)
  end

  defp add_validation_error(bp, error) do
    put_in(bp.execution.validation_errors, [error])
  end

  # When the pipeline allows jumps, skip straight to the result phase on
  # failure; otherwise return a plain `:error` tuple.
  def handle_error(blueprint, %{jump_phases: true, result_phase: abort_phase}) do
    {:jump, blueprint, abort_phase}
  end

  def handle_error(blueprint, _) do
    {:error, blueprint}
  end

  @spec tokenize(binary) :: {:ok, [tuple]} | {:error, binary}
  def tokenize(input) do
    case Absinthe.Lexer.tokenize(input) do
      {:error, rest, loc} ->
        {:error, format_raw_parse_error({:lexer, rest, loc})}

      other ->
        other
    end
  end

  @spec parse(binary) :: {:ok, Language.Document.t()} | {:error, tuple}
  @spec parse(Language.Source.t()) :: {:ok, Language.Document.t()} | {:error, tuple}
  defp parse(input) when is_binary(input) do
    parse(%Language.Source{body: input})
  end

  defp parse(input) do
    try do
      case input.body |> tokenize do
        # An empty token stream is an empty (but valid) document.
        {:ok, []} ->
          {:ok, %Language.Document{}}

        {:ok, tokens} ->
          case :absinthe_parser.parse(tokens) do
            {:ok, _doc} = result ->
              result

            {:error, raw_error} ->
              {:error, format_raw_parse_error(raw_error)}
          end

        other ->
          other
      end
    rescue
      error ->
        {:error, format_raw_parse_error(error)}
    end
  end

  @spec format_raw_parse_error({integer, :absinthe_parser, [charlist]}) :: Phase.Error.t()
  defp format_raw_parse_error({{line, column}, :absinthe_parser, msgs}) do
    message = msgs |> Enum.map(&to_string/1) |> Enum.join("")
    %Phase.Error{message: message, locations: [%{line: line, column: column}], phase: __MODULE__}
  end

  @spec format_raw_parse_error({:lexer, String.t(), {line :: pos_integer, column :: pos_integer}}) ::
          Phase.Error.t()
  defp format_raw_parse_error({:lexer, rest, {line, column}}) do
    # Bug fix: the previous match `<<sample::binary-size(10), _::binary>>`
    # required at least 10 bytes of remaining input and raised a MatchError
    # when the lexer failed near the end of the document. Take at most 10
    # bytes instead.
    sample_size = min(byte_size(rest), 10)
    <<sample::binary-size(sample_size), _::binary>> = rest
    message = "Parsing failed at `#{sample}`"
    %Phase.Error{message: message, locations: [%{line: line, column: column}], phase: __MODULE__}
  end

  @unknown_error_msg "An unknown error occurred during parsing"
  @spec format_raw_parse_error(map) :: Phase.Error.t()
  defp format_raw_parse_error(%{} = error) do
    detail =
      if Exception.exception?(error) do
        ": " <> Exception.message(error)
      else
        ""
      end

    %Phase.Error{message: @unknown_error_msg <> detail, phase: __MODULE__}
  end
end
defmodule Absinthe.Phase.Schema.FieldImports do
  @moduledoc false
  # Copies fields between type definitions according to each type's
  # `:imports` list, honoring `:only` / `:except` options.
  use Absinthe.Phase
  alias Absinthe.Blueprint
  alias Absinthe.Blueprint.Schema
  def run(blueprint, _opts) do
    blueprint = Blueprint.prewalk(blueprint, &handle_imports/1)
    {:ok, blueprint}
  end
  def handle_imports(%Schema.SchemaDefinition{} = schema) do
    # Per Phase.Schema.ValidateTypeReferences, the types are already
    # in the order they need to be in to accumulate imports properly.
    types =
      Enum.reduce(schema.type_definitions, %{}, fn type, types ->
        Map.put(types, type.identifier, import_fields(type, types))
      end)
    # Re-emit the definitions in their original order, now with imported
    # fields applied.
    types = Enum.map(schema.type_definitions, &Map.fetch!(types, &1.identifier))
    {:halt, %{schema | type_definitions: types}}
  end
  def handle_imports(node), do: node
  # Only these definition kinds support field imports.
  @can_import [
    Schema.ObjectTypeDefinition,
    Schema.InputObjectTypeDefinition,
    Schema.InterfaceTypeDefinition
  ]
  # Resolve a type's `:imports` against the already-processed `types` map:
  # drop `:except` fields, keep only `:only` fields when given, and prepend
  # the result to the importing type's own fields.
  def import_fields(%def_type{} = type, types) when def_type in @can_import do
    Enum.reduce(type.imports, type, fn {source, opts}, type ->
      source_type = Map.fetch!(types, source)
      rejections = Keyword.get(opts, :except, [])
      fields = source_type.fields |> Enum.reject(&(&1.identifier in rejections))
      fields =
        case Keyword.fetch(opts, :only) do
          {:ok, selections} ->
            Enum.filter(fields, &(&1.identifier in selections))
          _ ->
            fields
        end
      %{type | fields: fields ++ type.fields}
    end)
  end
  def import_fields(type, _), do: type
end
defmodule Absinthe.Phase.Schema.Decorate do
  @moduledoc false
  # Applies schema-declared decorations (e.g. descriptions, resolvers) to
  # blueprint schema nodes. The decoration source is `schema.decorations/2`;
  # how a decoration is applied is delegated to a decorator module (this
  # module by default).
  @behaviour __MODULE__.Decorator
  use Absinthe.Phase
  alias Absinthe.Blueprint
  # Node kinds eligible for decoration.
  @decorate [
    Blueprint.Schema.DirectiveDefinition,
    Blueprint.Schema.EnumTypeDefinition,
    Blueprint.Schema.EnumValueDefinition,
    Blueprint.Schema.FieldDefinition,
    Blueprint.Schema.InputObjectTypeDefinition,
    Blueprint.Schema.InputValueDefinition,
    Blueprint.Schema.InterfaceTypeDefinition,
    Blueprint.Schema.ObjectTypeDefinition,
    Blueprint.Schema.ScalarTypeDefinition,
    Blueprint.Schema.SchemaDefinition,
    Blueprint.Schema.UnionTypeDefinition
  ]
  @impl Absinthe.Phase
  def run(blueprint, opts \\ []) do
    {:ok, schema} = Keyword.fetch(opts, :schema)
    decorator = Keyword.get(opts, :decorator, __MODULE__)
    blueprint = Blueprint.prewalk(blueprint, &handle_node(&1, [], schema, decorator))
    {:ok, blueprint}
  end
  defp handle_node(%Blueprint{} = node, ancestors, schema, decorator) do
    node
    |> decorate_node(ancestors, schema, decorator)
    |> set_children(ancestors, schema, decorator)
  end
  defp handle_node(%node_module{} = node, ancestors, schema, decorator)
       when node_module in @decorate do
    # Built-in types are never decorated; halt the walk beneath them.
    case Absinthe.Type.built_in_module?(node.module) do
      true ->
        {:halt, node}
      false ->
        node
        |> decorate_node(ancestors, schema, decorator)
        |> set_children(ancestors, schema, decorator)
    end
  end
  defp handle_node(node, ancestors, schema, decorator) do
    set_children(node, ancestors, schema, decorator)
  end
  # Recurse one level: decorate each direct child (with this node prepended
  # to the ancestor chain) and halt so the outer prewalk doesn't descend
  # twice.
  defp set_children(parent, ancestors, schema, decorator) do
    Blueprint.prewalk(parent, fn
      ^parent -> parent
      child -> {:halt, handle_node(child, [parent | ancestors], schema, decorator)}
    end)
  end
  # Ask the schema for this node's decorations and apply them in order.
  defp decorate_node(%{} = node, ancestors, schema, decorator) do
    decorations = schema.decorations(node, ancestors)
    apply_decorations(node, decorations, decorator)
  end
  defp decorate_node(node, _ancestors, _schema, _decorator) do
    node
  end
  defp apply_decorations(node, decorations, decorator) do
    decorations
    |> List.wrap()
    |> Enum.reduce(node, fn decoration, node ->
      decorator.apply_decoration(node, decoration)
    end)
  end
  # Default decorator implementation: set a description, or install a
  # resolver as the node's middleware.
  @impl __MODULE__.Decorator
  def apply_decoration(node, {:description, text}) do
    %{node | description: text}
  end
  def apply_decoration(node, {:resolve, resolver}) do
    %{node | middleware: [{Absinthe.Resolution, resolver}]}
  end
end
defmodule Absinthe.Phase.Schema.Build do
  @moduledoc false

  # Materializes the schema's type and directive definitions into their
  # runtime artifacts, storing them on the (single) schema definition.

  def run(blueprint, _opts) do
    %{schema_definitions: [schema]} = blueprint

    schema = %{
      schema
      | type_artifacts: build_types(blueprint),
        directive_artifacts: build_directives(blueprint)
    }

    {:ok, %{blueprint | schema_definitions: [schema]}}
  end

  # Build each type definition via its own struct module, preserving the
  # definition's reference and private metadata on the artifact.
  def build_types(%{schema_definitions: [schema]}) do
    for %module{} = type_def <- schema.type_definitions do
      artifact = module.build(type_def, schema)

      %{
        artifact
        | __reference__: type_def.__reference__,
          __private__: type_def.__private__
      }
    end
  end

  # Same as `build_types/1`, but directive artifacts also record the module
  # that defined them.
  def build_directives(%{schema_definitions: [schema]}) do
    for %module{} = type_def <- schema.directive_definitions do
      artifact = module.build(type_def, schema)

      %{
        artifact
        | definition: type_def.module,
          __reference__: type_def.__reference__,
          __private__: type_def.__private__
      }
    end
  end
end
defmodule Absinthe.Phase.Schema.ValidateTypeReferences do
  @moduledoc false
  use Absinthe.Phase
  alias Absinthe.Blueprint
  alias Absinthe.Blueprint.Schema
  # TODO: actually do the type reference validation.
  # Right now it just handles topsorting the types by import
  def run(blueprint, _opts) do
    blueprint = Blueprint.prewalk(blueprint, &handle_imports/1)
    {:ok, blueprint}
  end
  def handle_imports(%Schema.SchemaDefinition{} = schema) do
    types = sort_and_validate_types(schema.type_definitions)
    {:halt, %{schema | type_definitions: types}}
  end
  def handle_imports(node), do: node
  # Build a (cyclic-capable) digraph of type identifiers, with an edge from
  # each type to every type it imports; raise on any import cycle, then
  # return the definitions topologically sorted so that imported types come
  # before their importers.
  def sort_and_validate_types(types) do
    graph = :digraph.new([:cyclic])
    # The digraph lives in ETS; guarantee it is deleted even if we raise.
    try do
      _ = check(types, graph)
      for type <- types do
        if cycle = :digraph.get_cycle(graph, type.identifier) do
          raise "cycle! #{inspect(cycle)}"
        end
      end
      types = Map.new(types, &{&1.identifier, &1})
      # Reversed so dependencies (imports) precede their dependents.
      graph
      |> :digraph_utils.topsort()
      |> Enum.reverse()
      |> Enum.map(&Map.fetch!(types, &1))
    after
      :digraph.delete(graph)
    end
  end
  defp check(types, graph) do
    Enum.each(types, &add_to_graph(&1, graph))
  end
  # Add the type plus an edge for each of its imports (if any).
  defp add_to_graph(type, graph) do
    :digraph.add_vertex(graph, type.identifier)
    with %{imports: imports} <- type do
      for {ident, _} <- imports do
        :digraph.add_vertex(graph, ident)
        :digraph.add_edge(graph, type.identifier, ident)
      end
    end
  end
end
defmodule Absinthe.Phase.Schema.Validation do
  @moduledoc false

  alias Absinthe.Phase

  # The ordered list of validation phases applied to a schema.
  def pipeline do
    [Phase.Validation.KnownDirectives]
  end
end
defmodule Absinthe.Phase.Schema.InlineFunctions do
  @moduledoc false
  # Replaces function references on schema artifacts with the literal
  # function values where they can be escaped into compiled module bodies;
  # middleware is expanded (or shimmed) separately.
  use Absinthe.Phase
  alias Absinthe.Blueprint
  alias Absinthe.Blueprint.Schema
  alias Absinthe.Type
  def run(blueprint, _opts) do
    blueprint = Blueprint.prewalk(blueprint, &inline_functions(&1, blueprint.schema))
    {:ok, blueprint}
  end
  def inline_functions(%Schema.SchemaDefinition{} = schema_def, schema) do
    schema_def = %{
      schema_def
      | type_artifacts: Enum.map(schema_def.type_artifacts, &inline_functions(&1, schema)),
        directive_artifacts:
          Enum.map(schema_def.directive_artifacts, &inline_functions(&1, schema))
    }
    {:halt, schema_def}
  end
  def inline_functions(%type{identifier: _} = node, schema) do
    type
    |> Schema.functions()
    # middleware gets handled specially
    |> Enum.reject(&(&1 in [:middleware]))
    |> Enum.reduce(node, &inline_function/2)
    |> inline_middleware(schema)
    |> Absinthe.Subscription.add_middleware()
  end
  def inline_functions(node, _) do
    node
  end
  # Reducer: `attr` comes first because this is the Enum.reduce element,
  # `node` is the accumulator. Only escapable functions are inlined.
  defp inline_function(attr, node) do
    function = Type.function(node, attr)
    if Absinthe.Utils.escapable?(function) do
      %{node | attr => function}
    else
      node
    end
  end
  # For objects: expand each field's middleware. When the middleware is a
  # `{:ref, module, identifier}` and its expansion cannot be escaped into
  # the compiled module, install a runtime shim that re-resolves the ref.
  def inline_middleware(%Type.Object{} = type, schema) do
    Map.update!(type, :fields, fn fields ->
      fields =
        Enum.map(fields, fn {field_ident, field} ->
          {field_ident, inline_functions(field, schema)}
        end)
      Map.new(fields, fn
        {field_ident, %{middleware: {:ref, module, identifier}} = field} ->
          middleware = Type.function(field, :middleware)
          middleware = Absinthe.Middleware.expand(schema, middleware, field, type)
          if Absinthe.Utils.escapable?(middleware) do
            {field_ident, %{field | middleware: middleware}}
          else
            middleware_shim = {
              {Absinthe.Middleware, :shim},
              {:ref, module, identifier}
            }
            {field_ident, %{field | middleware: [middleware_shim]}}
          end
        {field_ident, field} ->
          middleware = Absinthe.Middleware.expand(schema, field.middleware, field, type)
          {field_ident, %{field | middleware: middleware}}
      end)
    end)
  end
  def inline_middleware(type, _) do
    type
  end
end
defmodule Absinthe.Phase.Schema.TypeImports do
  @moduledoc false

  # Walks each schema definition's `import_types` declarations (plus the
  # built-in defaults) and copies the imported type definitions into the
  # schema, recursively following the imported modules' own imports.

  def run(blueprint, _opts) do
    blueprint =
      Map.update!(blueprint, :schema_definitions, fn schemas ->
        for schema <- schemas, do: handle_imports(schema)
      end)

    {:ok, blueprint}
  end

  @default_imports [
    {Absinthe.Type.BuiltIns.Scalars, []},
    {Absinthe.Type.BuiltIns.Directives, []},
    {Absinthe.Type.BuiltIns.Introspection, []}
  ]
  def handle_imports(schema) do
    types = do_imports(@default_imports ++ schema.imports, schema.type_definitions)
    # special casing the import of the built in directives
    [builtins] = Absinthe.Type.BuiltIns.Directives.__absinthe_blueprint__().schema_definitions
    directives = schema.directive_definitions ++ builtins.directive_definitions
    %{schema | type_definitions: types, directive_definitions: directives}
  end

  defp do_imports([], types) do
    types
  end

  defp do_imports([{module, opts} | rest], acc) do
    [other_def] = module.__absinthe_blueprint__.schema_definitions

    # Root types are never imported; `:except` adds further exclusions.
    rejections = MapSet.new([:query, :mutation, :subscription] ++ Keyword.get(opts, :except, []))

    types = Enum.reject(other_def.type_definitions, &(&1.identifier in rejections))

    # Bug fix: the result of this `case` was previously discarded, so an
    # `import_types ..., only: [...]` option had no effect. Bind it so the
    # `:only` filter is actually applied.
    types =
      case Keyword.fetch(opts, :only) do
        {:ok, selections} ->
          Enum.filter(types, &(&1.identifier in selections))

        _ ->
          types
      end

    # Recurse into the imported module's own imports.
    do_imports(other_def.imports ++ rest, types ++ acc)
  end
end
defmodule Absinthe.Phase.Schema.RegisterTriggers do
  @moduledoc false
  # Wires subscription trigger declarations onto the mutation fields they
  # reference, so publishing a mutation can find its subscriptions.
  use Absinthe.Phase
  alias Absinthe.Blueprint
  alias Absinthe.Blueprint.Schema
  def run(blueprint, _opts) do
    %{schema_definitions: [schema]} = blueprint
    subscription_object =
      Enum.find(schema.type_definitions, fn type ->
        type.identifier == :subscription
      end)
    mutation_object =
      Enum.find(schema.type_definitions, fn type ->
        type.identifier == :mutation
      end)
    # Triggers can only be registered when both root objects exist.
    mutation_object =
      if subscription_object && mutation_object do
        register_triggers(mutation_object, subscription_object.fields)
      else
        # TODO: return errors if there isn't a mutation field that is on the
        # triggers list
        mutation_object
      end
    # Swap the (possibly updated) root objects back into the definition
    # list. If a root object is nil here, no definition carries that
    # identifier, so the corresponding clause simply never matches.
    schema =
      Map.update!(schema, :type_definitions, fn definitions ->
        Enum.map(definitions, fn
          %{identifier: :subscription} -> subscription_object
          %{identifier: :mutation} -> mutation_object
          type -> type
        end)
      end)
    {:ok, %{blueprint | schema_definitions: [schema]}}
  end
  # For each mutation field, collect the identifiers of subscription fields
  # whose `:triggers` map mentions that mutation.
  defp register_triggers(mutation_object, sub_fields) do
    Map.update!(mutation_object, :fields, fn mut_fields ->
      for mut_field <- mut_fields do
        triggers =
          for sub_field <- sub_fields,
              sub_triggers = Absinthe.Type.function(sub_field, :triggers),
              Map.has_key?(sub_triggers, mut_field.identifier),
              do: sub_field.identifier
        %{mut_field | triggers: triggers}
      end
    end)
  end
end
defmodule Absinthe.Phase.Schema.Decorate.Decorator do
  @moduledoc false
  # Behaviour for decorator modules used by `Absinthe.Phase.Schema.Decorate`.
  # Applies a single decoration (e.g. `{:description, text}`) to a blueprint
  # schema node, returning the updated node.
  @callback apply_decoration(node :: Absinthe.Blueprint.Schema.t(), decoration :: any) ::
              Absinthe.Blueprint.Schema.t()
end
defmodule Absinthe.Phase.Schema.Compile do
  @moduledoc false
  # Generates and compiles a `<schema>.Compiled` module exposing the schema's
  # types, directives, interface implementors and references as lookup
  # functions (`__absinthe_type__/1`, `__absinthe_directive__/1`, etc.).
  alias Absinthe.Blueprint.Schema
  def run(blueprint, opts) do
    module_name = Module.concat(opts[:module], Compiled)
    %{schema_definitions: [schema]} = blueprint
    type_ast = build_types(schema.type_artifacts)
    directive_ast = build_directives(schema.directive_artifacts)
    # Identifier -> name maps for listing all types/directives.
    type_list =
      Map.new(schema.type_definitions, fn type_def ->
        {type_def.identifier, type_def.name}
      end)
    directive_list =
      Map.new(schema.directive_definitions, fn type_def ->
        {type_def.identifier, type_def.name}
      end)
    metadata = build_metadata(schema)
    implementors = build_implementors(schema)
    body = [
      type_ast,
      directive_ast,
      quote do
        def __absinthe_types__ do
          unquote(Macro.escape(type_list))
        end
        def __absinthe_directives__() do
          unquote(Macro.escape(directive_list))
        end
        def __absinthe_interface_implementors__() do
          unquote(Macro.escape(implementors))
        end
      end,
      metadata
    ]
    # Compile the generated module at runtime.
    Module.create(module_name, body, Macro.Env.location(__ENV__))
    {:ok, blueprint}
  end
  # One `__absinthe_reference__/1` clause per type definition.
  def build_metadata(schema) do
    for type <- schema.type_definitions do
      quote do
        def __absinthe_reference__(unquote(type.identifier)) do
          unquote(Macro.escape(type.__reference__))
        end
      end
    end
  end
  # `__absinthe_type__/1` clauses keyed by identifier AND by name, with a
  # catch-all returning nil. Raises for artifacts missing a `:definition`.
  def build_types(types) do
    for type <- types do
      if !type.definition,
        do:
          raise("""
          No definition set!
          #{inspect(type)}
          """)
      ast = Macro.escape(type)
      quote do
        def __absinthe_type__(unquote(type.identifier)) do
          unquote(ast)
        end
        def __absinthe_type__(unquote(type.name)) do
          unquote(ast)
        end
      end
    end
    |> Enum.concat([
      quote do
        def __absinthe_type__(_type) do
          nil
        end
      end
    ])
  end
  # Same shape as `build_types/1`, for directives.
  def build_directives(directives) do
    for type <- directives do
      ast = Macro.escape(type)
      quote do
        def __absinthe_directive__(unquote(type.identifier)) do
          unquote(ast)
        end
        def __absinthe_directive__(unquote(type.name)) do
          unquote(ast)
        end
      end
    end
    |> Enum.concat([
      quote do
        def __absinthe_directive__(_type) do
          nil
        end
      end
    ])
  end
  # Map of interface identifier -> sorted list of implementing object
  # identifiers.
  defp build_implementors(schema) do
    schema.type_definitions
    |> Enum.filter(&match?(%Schema.InterfaceTypeDefinition{}, &1))
    |> Map.new(fn iface ->
      implementors =
        for %Schema.ObjectTypeDefinition{} = obj <- schema.type_definitions,
            iface.identifier in obj.interfaces,
            do: obj.identifier
      {iface.identifier, Enum.sort(implementors)}
    end)
  end
end
defmodule Absinthe.Phase.Schema.Validation.Result do
  @moduledoc false

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _opts) do
    {input, errors} = Blueprint.prewalk(input, [], &handle_node/2)

    case :lists.reverse(errors) do
      [] -> {:ok, input}
      errors -> {:error, errors}
    end
  end

  # Accumulate validation errors from nodes; each node's errors are reversed
  # into the accumulator, and the whole list is reversed once in `run/2`.
  @spec handle_node(Blueprint.node_t(), [Phase.Error.t()]) ::
          {Blueprint.node_t(), [Phase.Error.t()]}
  defp handle_node(%{errors: errs} = node, errors) do
    {node, :lists.reverse(errs) ++ errors}
  end

  # Nodes carrying a raw (language AST) subtree are walked for errors too.
  defp handle_node(%{raw: raw} = node, errors) do
    {_, errors} = Blueprint.prewalk(raw, errors, &handle_node/2)
    {node, errors}
  end

  defp handle_node(node, acc), do: {node, acc}
end
defmodule Absinthe.Phase.Schema do
  @moduledoc false

  # Populate all schema nodes and the adapter for the blueprint tree. If the
  # blueprint tree is a _schema_ tree, this schema is the meta schema (source of
  # IDL directives, etc).
  #
  # Note that no validation occurs in this phase.

  use Absinthe.Phase
  alias Absinthe.{Blueprint, Type, Schema}

  # The approach here is pretty simple.
  # We start at the top blueprint node and set the appropriate schema node on operations
  # directives and so forth.
  #
  # Then, as `prewalk` walks down the tree we hit a node. If that node has a schema_node
  # set by its parent, we walk to its children and set the schema node on those children.
  # We do not need to walk any further because `prewalk` will do that for us.
  #
  # Thus at each node we need only concern ourselves with immediate children.

  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, options \\ []) do
    # :schema is required; the adapter falls back to LanguageConventions.
    schema = Keyword.fetch!(options, :schema)
    adapter = Keyword.get(options, :adapter, Absinthe.Adapter.LanguageConventions)

    result =
      input
      |> update_context(schema)
      |> Blueprint.prewalk(&handle_node(&1, schema, adapter))

    {:ok, result}
  end

  # With no schema there is no context callback to apply.
  defp update_context(input, nil), do: input

  # Let the schema module transform the execution context before the walk.
  defp update_context(input, schema) do
    context = schema.context(input.execution.context)
    put_in(input.execution.context, context)
  end

  # Root blueprint: record schema and adapter, then stamp immediate children.
  defp handle_node(%Blueprint{} = node, schema, adapter) do
    set_children(%{node | schema: schema, adapter: adapter}, schema, adapter)
  end

  # Variable definitions get their schema node via their parent in
  # set_schema_node/4; halt so prewalk does not descend into them.
  defp handle_node(%Absinthe.Blueprint.Document.VariableDefinition{} = node, _, _) do
    {:halt, node}
  end

  defp handle_node(node, schema, adapter) do
    set_children(node, schema, adapter)
  end

  # Stamp schema nodes onto the immediate children of `parent` only; the
  # `{:halt, ...}` return keeps this inner prewalk from recursing deeper.
  defp set_children(parent, schema, adapter) do
    Blueprint.prewalk(parent, fn
      ^parent -> parent
      %Absinthe.Blueprint.Input.Variable{} = child -> {:halt, child}
      child -> {:halt, set_schema_node(child, parent, schema, adapter)}
    end)
  end

  # Do note, the `parent` arg is the parent blueprint node, not the parent's schema node.

  # Inline fragment with a type condition: look the condition's type up and
  # stamp both the fragment and its condition.
  defp set_schema_node(
         %Blueprint.Document.Fragment.Inline{type_condition: %{name: type_name} = condition} =
           node,
         _parent,
         schema,
         _adapter
       ) do
    schema_node = Absinthe.Schema.lookup_type(schema, type_name)

    %{node | schema_node: schema_node, type_condition: %{condition | schema_node: schema_node}}
  end

  # Directives are looked up by their adapter-internal name.
  defp set_schema_node(%Blueprint.Directive{name: name} = node, _parent, schema, adapter) do
    schema_node =
      name
      |> adapter.to_internal_name(:directive)
      |> schema.__absinthe_directive__

    %{node | schema_node: schema_node}
  end

  # Operations map to the schema's root type for their operation type.
  defp set_schema_node(
         %Blueprint.Document.Operation{type: op_type} = node,
         _parent,
         schema,
         _adapter
       ) do
    %{node | schema_node: Absinthe.Schema.lookup_type(schema, op_type)}
  end

  defp set_schema_node(
         %Blueprint.Document.Fragment.Named{type_condition: %{name: type_name} = condition} =
           node,
         _parent,
         schema,
         _adapter
       ) do
    schema_node = Absinthe.Schema.lookup_type(schema, type_name)

    %{node | schema_node: schema_node, type_condition: %{condition | schema_node: schema_node}}
  end

  # Variable definitions: resolve the (possibly wrapped) type reference;
  # only stamp it when the innermost type actually exists in the schema.
  defp set_schema_node(
         %Blueprint.Document.VariableDefinition{type: type_reference} = node,
         _parent,
         schema,
         _adapter
       ) do
    wrapped =
      type_reference
      |> type_reference_to_type(schema)

    wrapped
    |> Type.unwrap()
    |> case do
      nil -> node
      _ -> %{node | schema_node: wrapped}
    end
  end

  defp set_schema_node(node, %{schema_node: nil}, _, _) do
    # if we don't know the parent schema node, and we aren't one of the earlier nodes,
    # then we can't know our schema node.
    node
  end

  # Inline fragment WITHOUT a type condition: synthesize one from the
  # parent's (expanded, unwrapped) schema type, then re-dispatch to the
  # type-condition clause above.
  defp set_schema_node(
         %Blueprint.Document.Fragment.Inline{type_condition: nil} = node,
         parent,
         schema,
         adapter
       ) do
    type =
      case parent.schema_node do
        %{type: type} -> type
        other -> other
      end
      |> Type.expand(schema)
      |> Type.unwrap()

    set_schema_node(
      %{node | type_condition: %Blueprint.TypeReference.Name{name: type.name, schema_node: type}},
      parent,
      schema,
      adapter
    )
  end

  defp set_schema_node(%Blueprint.Document.Field{} = node, parent, schema, adapter) do
    %{node | schema_node: find_schema_field(parent.schema_node, node.name, schema, adapter)}
  end

  defp set_schema_node(%Blueprint.Input.Argument{name: name} = node, parent, _schema, adapter) do
    %{node | schema_node: find_schema_argument(parent.schema_node, name, adapter)}
  end

  # Spreads are resolved elsewhere; nothing to stamp here.
  defp set_schema_node(%Blueprint.Document.Fragment.Spread{} = node, _, _, _) do
    node
  end

  # Input fields: "__"-prefixed (introspection-style) names get no schema node.
  defp set_schema_node(%Blueprint.Input.Field{} = node, parent, schema, adapter) do
    case node.name do
      "__" <> _ ->
        %{node | schema_node: nil}

      name ->
        %{node | schema_node: find_schema_field(parent.schema_node, name, schema, adapter)}
    end
  end

  # Input lists take the wrapped inner type of their (non-null-stripped) parent.
  defp set_schema_node(%Blueprint.Input.List{} = node, parent, _schema, _adapter) do
    case Type.unwrap_non_null(parent.schema_node) do
      %{of_type: internal_type} ->
        %{node | schema_node: internal_type}

      _ ->
        node
    end
  end

  # Input values take the expanded type of the argument/field/type above them.
  defp set_schema_node(%Blueprint.Input.Value{} = node, parent, schema, _) do
    case parent.schema_node do
      %Type.Argument{type: type} ->
        %{node | schema_node: type |> Type.expand(schema)}

      %Absinthe.Type.Field{type: type} ->
        %{node | schema_node: type |> Type.expand(schema)}

      type ->
        %{node | schema_node: type |> Type.expand(schema)}
    end
  end

  # NOTE(review): this clause sits after the Input.* clauses above, so it only
  # applies to node kinds not matched earlier — confirm the intended reach.
  defp set_schema_node(%{schema_node: nil} = node, %Blueprint.Input.Value{} = parent, _schema, _) do
    %{node | schema_node: parent.schema_node}
  end

  defp set_schema_node(node, _, _, _) do
    node
  end

  # Given a schema field or directive, lookup a child argument definition
  @spec find_schema_argument(
          nil | Type.Field.t() | Type.Argument.t(),
          String.t(),
          Absinthe.Adapter.t()
        ) :: nil | Type.Argument.t()
  defp find_schema_argument(%{args: arguments}, name, adapter) do
    internal_name = adapter.to_internal_name(name, :argument)

    arguments
    |> Map.values()
    |> Enum.find(&match?(%{name: ^internal_name}, &1))
  end

  # Given a schema type, lookup a child field definition
  @spec find_schema_field(nil | Type.t(), String.t(), Absinthe.Schema.t(), Absinthe.Adapter.t()) ::
          nil | Type.Field.t()
  defp find_schema_field(_, "__" <> introspection_field, _, _) do
    Absinthe.Introspection.Field.meta(introspection_field)
  end

  # Unwrap List/NonNull wrappers before searching.
  defp find_schema_field(%{of_type: type}, name, schema, adapter) do
    find_schema_field(type, name, schema, adapter)
  end

  defp find_schema_field(%{fields: fields}, name, _schema, adapter) do
    internal_name = adapter.to_internal_name(name, :field)

    fields
    |> Map.values()
    |> Enum.find(&match?(%{name: ^internal_name}, &1))
  end

  # A field's type may itself be a reference; expand it and recurse.
  defp find_schema_field(%Type.Field{type: maybe_wrapped_type}, name, schema, adapter) do
    type =
      Type.unwrap(maybe_wrapped_type)
      |> schema.__absinthe_lookup__

    find_schema_field(type, name, schema, adapter)
  end

  defp find_schema_field(_, _, _, _) do
    nil
  end

  @type_mapping %{
    Blueprint.TypeReference.List => Type.List,
    Blueprint.TypeReference.NonNull => Type.NonNull
  }

  defp type_reference_to_type(%Blueprint.TypeReference.Name{name: name}, schema) do
    Schema.lookup_type(schema, name)
  end

  # Compile-time generated clauses: convert blueprint wrapper references
  # (List/NonNull) into the corresponding schema wrapper structs.
  for {blueprint_type, core_type} <- @type_mapping do
    defp type_reference_to_type(%unquote(blueprint_type){} = node, schema) do
      inner = type_reference_to_type(node.of_type, schema)
      %unquote(core_type){of_type: inner}
    end
  end
end
defmodule Absinthe.Phase.Validation do
  @moduledoc false

  alias Absinthe.Blueprint

  # Using this module imports the validation helpers below.
  defmacro __using__(_) do
    quote do
      import unquote(__MODULE__).Helpers
    end
  end

  defmodule Helpers do
    # True when at least one node carries the :invalid flag in its flag map.
    @spec any_invalid?([Blueprint.node_t()]) :: boolean
    def any_invalid?(nodes) do
      Enum.any?(nodes, &match?(%{flags: %{invalid: _}}, &1))
    end
  end
end
defmodule Absinthe.Phase.Debug do
  use Absinthe.Phase

  @moduledoc false

  alias Absinthe.Blueprint

  # Dump the input to stdout when the DEBUG environment variable is set;
  # the input is always passed through unchanged.
  @spec run(any, Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    if System.get_env("DEBUG") != nil do
      IO.inspect(input)
    end

    {:ok, input}
  end
end
defmodule Absinthe.Phase.Validation.KnownDirectives do
  @moduledoc false

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.postwalk(input, &handle_node/1)}
  end

  # A directive with no schema node is unknown to the schema: record an error.
  defp handle_node(%Blueprint.Directive{schema_node: nil} = node) do
    put_error(node, error_unknown(node))
  end

  defp handle_node(%Blueprint.Directive{} = node), do: node

  defp handle_node(%{directives: []} = node), do: node

  defp handle_node(%{directives: _} = node) do
    checked = check_directives(node)

    # NOTE(review): `node.directives` here is the *pre-check* list, so flags
    # added by check_directives/1 are not consulted — confirm this is intended.
    inherit_invalid(checked, node.directives, :bad_directive)
  end

  defp handle_node(node), do: node

  # Verify each directive is allowed at this node's placement, flagging
  # misplaced ones as invalid.
  defp check_directives(node) do
    placement = Blueprint.Directive.placement(node)

    checked =
      Enum.map(node.directives, fn directive ->
        cond do
          is_nil(directive.schema_node) ->
            directive

          placement in directive.schema_node.locations ->
            directive

          true ->
            directive
            |> put_error(error_misplaced(directive, placement))
            |> flag_invalid(:bad_placement)
        end
      end)

    %{node | directives: checked}
  end

  # Generate the error for the node
  @spec error_unknown(Blueprint.node_t()) :: Phase.Error.t()
  defp error_unknown(node) do
    %Phase.Error{
      phase: __MODULE__,
      message: "Unknown directive.",
      locations: [node.source_location]
    }
  end

  @spec error_misplaced(Blueprint.node_t(), atom) :: Phase.Error.t()
  defp error_misplaced(node, placement) do
    placement_name = placement |> to_string |> String.upcase()

    %Phase.Error{
      phase: __MODULE__,
      message: "May not be used on #{placement_name}.",
      locations: [node.source_location]
    }
  end
end
defmodule Absinthe.Phase.Validation.KnownTypeNames do
  @moduledoc false

  # Ensure type names actually exist in the schema.
  #
  # Type names show up for example in fragments:
  #
  # ```
  # fragment foo on Foo {
  #   name
  # }
  # ```

  alias Absinthe.{Blueprint, Phase}

  use Absinthe.Phase
  use Absinthe.Phase.Validation

  @doc """
  Run the validation.
  """
  @spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
  def run(input, _options \\ []) do
    {:ok, Blueprint.postwalk(input, &handle_node(&1, input.schema))}
  end

  # A node with a type condition but no schema node names an unknown type.
  defp handle_node(%{type_condition: condition, schema_node: nil} = node, _schema)
       when not is_nil(condition) do
    bad_name = Blueprint.TypeReference.unwrap(condition).name

    node
    |> flag_invalid(:bad_type_name)
    |> put_error(error(node, bad_name))
  end

  # A variable definition without a schema node: re-check whether the named
  # type exists at all before flagging it.
  defp handle_node(%Blueprint.Document.VariableDefinition{schema_node: nil} = node, schema) do
    name = Blueprint.TypeReference.unwrap(node.type).name

    if schema.__absinthe_lookup__(name) do
      node
    else
      node
      |> flag_invalid(:bad_type_name)
      |> put_error(error(node, name))
    end
  end

  defp handle_node(node, _schema), do: node

  @spec error(Blueprint.node_t(), String.t()) :: Phase.Error.t()
  defp error(node, name) do
    %Phase.Error{
      phase: __MODULE__,
      message: ~s(Unknown type "#{name}".),
      locations: [node.source_location]
    }
  end
end
defmodule Absinthe.Type do
  @moduledoc false

  alias __MODULE__
  alias Absinthe.{Introspection, Schema}

  @type function_identifier :: {module, any}
  @type function_ref :: {:ref, module, function_identifier}

  # ALL TYPES

  @type_modules [
    Type.Scalar,
    Type.Object,
    Type.Interface,
    Type.Union,
    Type.Enum,
    Type.InputObject,
    Type.List,
    Type.NonNull
  ]

  @typedoc "The types that can be custom-built in a schema"
  @type custom_t ::
          Type.Scalar.t()
          | Type.Object.t()
          | Type.Field.t()
          | Type.Interface.t()
          | Type.Union.t()
          | Type.Enum.t()
          | Type.InputObject.t()

  @typedoc "All the possible types"
  @type t :: custom_t | wrapping_t

  @typedoc "A type identifier"
  @type identifier_t :: atom

  @typedoc "A type reference"
  @type reference_t :: identifier_t | t

  # Fetch the function stored under `key` on the type; `{:ref, module, id}`
  # values are dereferenced through the owning module, plain functions pass
  # through unchanged. Raises if `key` is absent (Map.fetch!).
  def function(type, key) do
    case Map.fetch!(type, key) do
      {:ref, module, identifier} ->
        module.__absinthe_function__(identifier, key)

      function ->
        function
    end
  end

  @doc "Lookup a custom metadata field on a type"
  @spec meta(custom_t, atom) :: nil | any
  def meta(%{__private__: store}, key) do
    get_in(store, [:meta, key])
  end

  @doc "Return all custom metadata on a type"
  @spec meta(custom_t) :: map
  def meta(%{__private__: store}) do
    Keyword.get(store, :meta, [])
    |> Enum.into(%{})
  end

  @doc "Determine if a struct matches one of the types"
  @spec type?(any) :: boolean
  def type?(%{__struct__: mod}) when mod in @type_modules, do: true
  def type?(_), do: false

  @doc "Determine whether a field/argument is deprecated"
  @spec deprecated?(Type.Field.t() | Type.Argument.t()) :: boolean
  def deprecated?(%{deprecation: nil}), do: false
  def deprecated?(%{deprecation: _}), do: true

  # Two types are considered equal when their names match.
  def equal?(%{name: name}, %{name: name}), do: true
  def equal?(_, _), do: false

  # A type is "built in" when its defining module lives under
  # Absinthe.Type.BuiltIns.
  def built_in?(type) do
    type.definition
    |> built_in_module?()
  end

  def built_in_module?(module) do
    module
    |> Module.split()
    |> Enum.take(3)
    |> Module.concat() == Absinthe.Type.BuiltIns
  end

  # INPUT TYPES

  @input_type_modules [Type.Scalar, Type.Enum, Type.InputObject, Type.List, Type.NonNull]

  @typedoc "These types may be used as input types for arguments and directives."
  @type input_t ::
          Type.Scalar.t()
          | Type.Enum.t()
          | Type.InputObject.t()
          | Type.List.t()
          | Type.NonNull.t()

  @doc "Determine if a term is an input type"
  @spec input_type?(any) :: boolean
  def input_type?(term) do
    term
    |> named_type
    |> do_input_type?
  end

  defp do_input_type?(%{__struct__: mod}) when mod in @input_type_modules, do: true
  defp do_input_type?(_), do: false

  # OBJECT TYPE

  @doc "Determine if a term is an object type"
  @spec object_type?(any) :: boolean
  def object_type?(%Type.Object{}), do: true
  def object_type?(_), do: false

  @doc "Resolve a type for a value from an interface (if necessary)"
  @spec resolve_type(t, any) :: t
  def resolve_type(%{resolve_type: resolver}, value), do: resolver.(value)
  def resolve_type(type, _value), do: type

  # TYPE WITH FIELDS

  @doc "Determine if a type has fields"
  @spec fielded?(any) :: boolean
  def fielded?(%{fields: _}), do: true
  def fielded?(_), do: false

  # OUTPUT TYPES

  @output_type_modules [Type.Scalar, Type.Object, Type.Interface, Type.Union, Type.Enum]

  @typedoc "These types may be used as output types as the result of fields."
  @type output_t ::
          Type.Scalar.t() | Type.Object.t() | Type.Interface.t() | Type.Union.t() | Type.Enum.t()

  @doc "Determine if a term is an output type"
  @spec output_type?(any) :: boolean
  def output_type?(term) do
    term
    |> named_type
    |> do_output_type?
  end

  defp do_output_type?(%{__struct__: mod}) when mod in @output_type_modules, do: true
  defp do_output_type?(_), do: false

  # LEAF TYPES

  @leaf_type_modules [Type.Scalar, Type.Enum]

  @typedoc "These types may describe types which may be leaf values."
  @type leaf_t :: Type.Scalar.t() | Type.Enum.t()

  @doc "Determine if a term is a leaf type"
  @spec leaf_type?(any) :: boolean
  def leaf_type?(term) do
    term
    |> named_type
    |> do_leaf_type?
  end

  defp do_leaf_type?(%{__struct__: mod}) when mod in @leaf_type_modules, do: true
  defp do_leaf_type?(_), do: false

  # COMPOSITE TYPES

  @composite_type_modules [Type.Object, Type.Interface, Type.Union]

  @typedoc "These types may describe the parent context of a selection set."
  @type composite_t :: Type.Object.t() | Type.Interface.t() | Type.Union.t()

  @doc "Determine if a term is a composite type"
  @spec composite_type?(any) :: boolean
  def composite_type?(%{__struct__: mod}) when mod in @composite_type_modules, do: true
  def composite_type?(_), do: false

  # ABSTRACT TYPES

  @abstract_type_modules [Type.Interface, Type.Union]

  @typedoc "These types may describe the parent context of a selection set."
  @type abstract_t :: Type.Interface.t() | Type.Union.t()

  @doc "Determine if a term is an abstract type"
  @spec abstract?(any) :: boolean
  def abstract?(%{__struct__: mod}) when mod in @abstract_type_modules, do: true
  def abstract?(_), do: false

  # NULLABLE TYPES

  # @nullable_type_modules [Type.Scalar, Type.Object, Type.Interface, Type.Union, Type.Enum, Type.InputObject, Type.List]

  @typedoc "These types can all accept null as a value."
  @type nullable_t ::
          Type.Scalar.t()
          | Type.Object.t()
          | Type.Interface.t()
          | Type.Union.t()
          | Type.Enum.t()
          | Type.InputObject.t()
          | Type.List.t()

  @doc "Unwrap the underlying nullable type or return unmodified"
  # nullable_t is a subset of t, but broken out for clarity
  @spec nullable(any) :: nullable_t | t
  def nullable(%Type.NonNull{of_type: nullable}), do: nullable
  def nullable(term), do: term

  @doc "Determine if a type is non null"
  @spec non_null?(t) :: boolean
  def non_null?(%Type.NonNull{}), do: true
  def non_null?(_), do: false

  # NAMED TYPES

  @named_type_modules [
    Type.Scalar,
    Type.Object,
    Type.Interface,
    Type.Union,
    Type.Enum,
    Type.InputObject
  ]

  @typedoc "These named types do not include modifiers like Absinthe.Type.List or Absinthe.Type.NonNull."
  @type named_t ::
          Type.Scalar.t()
          | Type.Object.t()
          | Type.Interface.t()
          | Type.Union.t()
          | Type.Enum.t()
          | Type.InputObject.t()

  @doc "Determine the underlying named type, if any"
  @spec named_type(any) :: nil | named_t
  def named_type(%{__struct__: mod, of_type: unmodified}) when mod in [Type.List, Type.NonNull] do
    named_type(unmodified)
  end

  def named_type(%{__struct__: mod} = term) when mod in @named_type_modules, do: term
  def named_type(_), do: nil

  @doc "Determine if a type is named"
  @spec named?(t) :: boolean
  def named?(%{name: _}), do: true
  def named?(_), do: false

  # WRAPPERS

  @wrapping_modules [Type.List, Type.NonNull]

  @typedoc "A type wrapped in a List or NonNull"
  @type wrapping_t :: Type.List.t() | Type.NonNull.t()

  @spec wrapped?(t) :: boolean
  def wrapped?(%{__struct__: mod}) when mod in @wrapping_modules, do: true
  def wrapped?(_), do: false

  @doc "Unwrap a type from a List or NonNull"
  @spec unwrap(wrapping_t) :: custom_t
  @spec unwrap(type) :: type when type: custom_t
  def unwrap(%{of_type: t}), do: unwrap(t)
  def unwrap(type), do: type

  @doc "Unwrap a type from NonNull"
  @spec unwrap_non_null(Type.NonNull.t()) :: custom_t
  @spec unwrap_non_null(type) :: type when type: custom_t | Type.List.t()
  def unwrap_non_null(%Type.NonNull{of_type: t}), do: unwrap_non_null(t)
  def unwrap_non_null(type), do: type

  @doc """
  Get the GraphQL name for a (possibly wrapped) type, expanding
  any references if necessary using the provided schema.
  """
  @spec name(reference_t, Schema.t()) :: String.t()
  def name(ref, schema) do
    expanded = expand(ref, schema)
    name(expanded)
  end

  @doc """
  Get the GraphQL name for a (possibly wrapped) type.

  Note: Use `name/2` if the provided type reference needs to
  be expanded to resolve any atom type references.
  """
  @spec name(wrapping_t | t) :: String.t()
  def name(%Type.NonNull{of_type: contents}) do
    name(contents) <> "!"
  end

  def name(%Type.List{of_type: contents}) do
    "[" <> name(contents) <> "]"
  end

  def name(%{name: name}) do
    name
  end

  @doc "Expand any atom type references inside a List or NonNull"
  @spec expand(reference_t, Schema.t()) :: wrapping_t | t
  def expand(ref, schema) when is_atom(ref) do
    schema.__absinthe_lookup__(ref)
  end

  def expand(%{of_type: contents} = ref, schema) do
    %{ref | of_type: expand(contents, schema)}
  end

  def expand(type, _) do
    type
  end

  # INTROSPECTION TYPE

  # Introspection types are identified by the "__" name prefix.
  @spec introspection?(t) :: boolean
  def introspection?(%{name: "__" <> _}) do
    true
  end

  def introspection?(_) do
    false
  end

  # VALUE TYPE

  @spec value_type(t, Schema.t()) :: Type.t()
  def value_type(%Type.Field{} = node, schema) do
    Type.expand(node.type, schema)
  end

  def value_type(type, schema) do
    Type.expand(type, schema)
  end

  # VALID TYPE

  # nil is never a valid non-null input...
  def valid_input?(%Type.NonNull{}, nil) do
    false
  end

  def valid_input?(%Type.NonNull{of_type: internal_type}, value) do
    valid_input?(internal_type, value)
  end

  # ...but is valid for any nullable type.
  def valid_input?(_type, nil) do
    true
  end

  # Scalar-like types validate by attempting to parse the value.
  def valid_input?(%{parse: parse}, value) do
    case parse.(value) do
      {:ok, _} -> true
      :error -> false
    end
  end

  def valid_input?(_, _) do
    true
  end

  # "__"-prefixed names resolve to introspection meta fields.
  def field(_type, "__" <> meta_name) do
    Introspection.Field.meta(meta_name)
  end

  # Field identifiers are atoms; String.to_existing_atom raises
  # ArgumentError for names never seen as atoms, which we treat as "no field".
  def field(%{fields: fields}, name) do
    fields
    |> Map.get(name |> String.to_existing_atom())
  rescue
    ArgumentError -> nil
  end

  def field(_, _name) do
    nil
  end

  # NOTE(review): despite the `[t]` return in this spec, the implementation
  # below accumulates and returns a MapSet of type *identifiers* (atoms) —
  # confirm which is intended.
  @spec referenced_types(t, Schema.t()) :: [t]
  def referenced_types(type, schema) do
    referenced_types(type, schema, MapSet.new())
  end

  defp referenced_types(%Type.Argument{type: type}, schema, acc) do
    referenced_types(type, schema, acc)
  end

  defp referenced_types(%Type.Directive{} = type, schema, acc) do
    type.args
    |> Map.values()
    |> Enum.reduce(acc, &referenced_types(&1.type, schema, &2))
  end

  defp referenced_types(%Type.Enum{identifier: identifier}, _schema, acc) do
    MapSet.put(acc, identifier)
  end

  defp referenced_types(%Type.Field{} = field, schema, acc) do
    acc =
      field.args
      |> Map.values()
      |> Enum.reduce(acc, &referenced_types(&1, schema, &2))

    referenced_types(field.type, schema, acc)
  end

  # The `identifier in acc` guard below (and in the Interface/Object/Union
  # clauses) prevents infinite recursion through cyclic type references.
  defp referenced_types(%Type.InputObject{identifier: identifier} = input_object, schema, acc) do
    if identifier in acc do
      acc
    else
      acc = MapSet.put(acc, identifier)

      input_object.fields
      |> Map.values()
      |> Enum.reduce(acc, &referenced_types(&1, schema, &2))
    end
  end

  defp referenced_types(%Type.Interface{identifier: identifier} = interface, schema, acc) do
    if identifier in acc do
      acc
    else
      acc = MapSet.put(acc, identifier)

      acc =
        interface.fields
        |> Map.values()
        |> Enum.reduce(acc, &referenced_types(&1, schema, &2))

      # An interface also references every type that implements it.
      schema
      |> Absinthe.Schema.implementors(identifier)
      |> Enum.reduce(acc, &referenced_types(&1, schema, &2))
    end
  end

  defp referenced_types(%Type.List{of_type: inner_type}, schema, acc) do
    referenced_types(inner_type, schema, acc)
  end

  defp referenced_types(%Type.NonNull{of_type: inner_type}, schema, acc) do
    referenced_types(inner_type, schema, acc)
  end

  defp referenced_types(%Type.Object{identifier: identifier} = object, schema, acc) do
    if identifier in acc do
      acc
    else
      acc = MapSet.put(acc, identifier)

      acc =
        object.fields
        |> Map.values()
        |> Enum.reduce(acc, &referenced_types(&1, schema, &2))

      object.interfaces
      |> Enum.reduce(acc, &referenced_types(&1, schema, &2))
    end
  end

  defp referenced_types(%Type.Reference{} = ref, schema, acc) do
    referenced_types(ref.identifier, schema, acc)
  end

  defp referenced_types(%Type.Scalar{identifier: identifier}, _schema, acc) do
    MapSet.put(acc, identifier)
  end

  defp referenced_types(%Type.Union{identifier: identifier} = union, schema, acc) do
    if identifier in acc do
      acc
    else
      acc = MapSet.put(acc, identifier)

      union.types
      |> Enum.reduce(acc, &referenced_types(&1, schema, &2))
    end
  end

  # Bare atom identifiers are resolved via the schema, then recursed on.
  defp referenced_types(type, schema, acc) when is_atom(type) and type != nil do
    referenced_types(Schema.lookup_type(schema, type), schema, acc)
  end
end
defmodule Absinthe.Test do
  @doc """
  Run the introspection query on a schema.

  In your `test_helper.exs` file add

  ```
  Absinthe.Test.prime(MyApp.Schema)
  ```

  ## Explanation

  In the test environment mix loads code lazily, which means that it isn't until
  the first GraphQL query in your test suite runs that Absinthe's code base is
  actually loaded. Absinthe is a lot of code, and so this can take several
  milliseconds. This can be a problem for tests using message passing that expect
  messages to happen within a certain amount of time.

  By running the introspection query on your schema this function will cause mix
  to load the majority of the Absinthe code base.
  """
  def prime(schema_name) do
    # Introspect the schema; the match raises if the result does not carry
    # a :data payload, surfacing setup problems immediately.
    result = Absinthe.Schema.introspect(schema_name)
    {:ok, %{data: _}} = result
    :ok
  end
end
defmodule Absinthe.Logger do
  @default_log true
  @default_filter_variables ~w(token password)
  @default_pipeline false

  @moduledoc """
  Handles logging of Absinthe-specific events.

  ## Variable filtering

  Absinthe can filter out sensitive information like tokens and passwords
  during logging. They are replaced by `"[FILTERED]"`.

  Use the `:filter_variables` configuration setting for this module.
  For example:

      config :absinthe, Absinthe.Logger,
        filter_variables: ["token", "password", "secret"]

  With the configuration above, Absinthe will filter any variable whose name
  includes the terms `token`, `password`, or `secret`. The match is case
  sensitive.

  The default is `#{inspect(@default_filter_variables)}`.

  ## Pipeline display

  Absinthe can optionally display the list of pipeline phases for each processed
  document when logging. To enable this feature, set the `:pipeline`
  configuration option for this module:

      config :absinthe, Absinthe.Logger,
        pipeline: true

  The default is `#{inspect(@default_pipeline)}`.

  ## Disabling

  To disable Absinthe logging, set the `:log` configuration option to `false`:

      config :absinthe,
        log: false

  The default is `#{inspect(@default_log)}`.
  """

  require Logger

  @doc """
  Log a document being processed.
  """
  @spec log_run(
          level :: Logger.level(),
          {doc :: Absinthe.Pipeline.data_t(), schema :: Absinthe.Schema.t(),
           pipeline :: Absinthe.Pipeline.t(), opts :: Keyword.t()}
        ) :: :ok
  def log_run(level, {doc, schema, pipeline, opts}) do
    # The message is built inside a closure so no work happens when the
    # configured Logger level filters it out.
    if Application.get_env(:absinthe, :log, @default_log) do
      Logger.log(level, fn ->
        [
          "ABSINTHE",
          " schema=",
          inspect(schema),
          " variables=",
          variables_body(opts),
          pipeline_section(pipeline),
          "---",
          ?\n,
          document(doc),
          ?\n,
          "---"
        ]
      end)
    end

    :ok
  end

  # Render a loggable representation of the document in whatever form it
  # arrives: empty, compiled blueprint (named or not), language source,
  # raw string, or anything else (inspected).
  @doc false
  @spec document(Absinthe.Pipeline.data_t()) :: iolist
  def document(value) when value in ["", nil] do
    "[EMPTY]"
  end

  def document(%Absinthe.Blueprint{name: nil}) do
    "[COMPILED]"
  end

  def document(%Absinthe.Blueprint{name: name}) do
    "[COMPILED#<#{name}>]"
  end

  def document(%Absinthe.Language.Source{body: body}) do
    document(body)
  end

  def document(document) when is_binary(document) do
    String.trim(document)
  end

  def document(other) do
    inspect(other)
  end

  # Recursively replace values whose (string) keys contain any of the
  # filter terms with "[FILTERED]". Structs pass through untouched.
  @doc false
  @spec filter_variables(map) :: map
  @spec filter_variables(map, [String.t()]) :: map
  def filter_variables(data, filter_variables \\ variables_to_filter())

  def filter_variables(%{__struct__: mod} = struct, _filter_variables) when is_atom(mod) do
    struct
  end

  def filter_variables(%{} = map, filter_variables) do
    Enum.into(map, %{}, fn {k, v} ->
      if is_binary(k) and String.contains?(k, filter_variables) do
        {k, "[FILTERED]"}
      else
        {k, filter_variables(v, filter_variables)}
      end
    end)
  end

  def filter_variables([_ | _] = list, filter_variables) do
    Enum.map(list, &filter_variables(&1, filter_variables))
  end

  def filter_variables(other, _filter_variables), do: other

  # Read the configured filter terms, defaulting to @default_filter_variables.
  @spec variables_to_filter() :: [String.t()]
  defp variables_to_filter do
    Application.get_env(:absinthe, __MODULE__, [])
    |> Keyword.get(:filter_variables, @default_filter_variables)
  end

  # Filtered, inspected form of the :variables option for the log line.
  @spec variables_body(Keyword.t()) :: String.t()
  defp variables_body(opts) do
    Keyword.get(opts, :variables, %{})
    |> filter_variables()
    |> inspect()
  end

  # Render the pipeline only when the :pipeline config flag is true;
  # otherwise just emit the newline separating the header from the document.
  @spec pipeline_section(Absinthe.Pipeline.t()) :: iolist
  defp pipeline_section(pipeline) do
    Application.get_env(:absinthe, __MODULE__, [])
    |> Keyword.get(:pipeline, @default_pipeline)
    |> case do
      true ->
        do_pipeline_section(pipeline)

      false ->
        ?\n
    end
  end

  # List the phase modules, dropping any per-phase options tuples.
  @spec do_pipeline_section(Absinthe.Pipeline.t()) :: iolist
  defp do_pipeline_section(pipeline) do
    [
      " pipeline=",
      pipeline
      |> Enum.map(fn
        {mod, _} -> mod
        mod -> mod
      end)
      |> inspect,
      ?\n
    ]
  end
end
defmodule Absinthe.Phase do
  @moduledoc """
  Behaviour for Absinthe Phases.

  A phase takes an `Absinthe.Blueprint` document and returns another blueprint document.
  All validation, resolution, and result building happens via phases. See
  `Absinthe.Pipeline` for information on how to run phases. See the code under
  this namespace for information on individual phases.
  """

  @type t :: module

  @type result_t ::
          {:ok, any}
          | {:jump, any, t}
          | {:insert, any, t | [t]}
          | {:replace, any, t | [t]}
          | {:error, any}

  alias __MODULE__
  alias Absinthe.Blueprint

  # Injects node-flagging helpers into the using phase module; each helper
  # records the using module (__MODULE__ at expansion site) as the flag source.
  defmacro __using__(_) do
    quote do
      @behaviour Phase

      import(unquote(__MODULE__))

      # Mark the node invalid, attributing the flag to this phase.
      @spec flag_invalid(Blueprint.node_t()) :: Blueprint.node_t()
      def flag_invalid(%{flags: _} = node) do
        Absinthe.Blueprint.put_flag(node, :invalid, __MODULE__)
      end

      # Mark the node invalid and also set a more specific flag.
      @spec flag_invalid(Blueprint.node_t(), atom) :: Blueprint.node_t()
      def flag_invalid(%{flags: _} = node, flag) do
        flagging = %{:invalid => __MODULE__, flag => __MODULE__}
        update_in(node.flags, &Map.merge(&1, flagging))
      end

      def put_flag(%{flags: _} = node, flag) do
        Absinthe.Blueprint.put_flag(node, flag, __MODULE__)
      end

      # Flag the node with `add_flag` (and :invalid) when any child is
      # already flagged invalid.
      def inherit_invalid(%{flags: _} = node, children, add_flag) do
        case any_invalid?(children) do
          true ->
            flag_invalid(node, add_flag)

          false ->
            node
        end
      end
    end
  end

  # Prepend an error to the node's error list.
  @spec put_error(Blueprint.node_t(), Phase.Error.t()) :: Blueprint.node_t()
  def put_error(%{errors: _} = node, error) do
    update_in(node.errors, &[error | &1])
  end

  # True when any node carries the :invalid flag.
  def any_invalid?(nodes) do
    Enum.any?(nodes, &match?(%{flags: %{invalid: _}}, &1))
  end

  @callback run(any, any) :: result_t
end
defmodule Absinthe.Utils.Suggestion do
  @jaro_threshold 0.70

  @doc """
  Sort a list of suggestions by Jaro distance to a target string,
  supporting a cut-off threshold.
  """
  @spec sort_list([String.t()], String.t(), float) :: [String.t()]
  def sort_list(suggestions, target, threshold \\ @jaro_threshold)

  def sort_list(suggestions, target, threshold) do
    # Score every candidate, drop those below the threshold, and return the
    # survivors ordered by ascending distance.
    scored = for suggestion <- suggestions, do: {suggestion, String.jaro_distance(suggestion, target)}

    scored
    |> Enum.reject(fn {_suggestion, score} -> score < threshold end)
    |> Enum.sort_by(fn {_suggestion, score} -> score end)
    |> Enum.map(&elem(&1, 0))
  end
end
defmodule Absinthe.Language.BooleanValue do
  @moduledoc false

  alias Absinthe.Blueprint

  defstruct [
    :value,
    :loc
  ]

  @type t :: %__MODULE__{
          value: boolean,
          loc: Absinthe.Language.loc_t()
        }

  defimpl Blueprint.Draft do
    # Convert this language node into a blueprint boolean input.
    def convert(node, doc) do
      drafted = Absinthe.Blueprint.Draft.convert(node.value, doc)

      %Blueprint.Input.Boolean{
        value: drafted,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.VariableDefinition do
  @moduledoc false

  alias Absinthe.{Blueprint, Language}

  defstruct variable: nil,
            type: nil,
            default_value: nil,
            loc: %{line: nil}

  @type t :: %__MODULE__{
          variable: Language.Variable.t(),
          type: Language.type_reference_t(),
          default_value: any,
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    # Convert this language node into a blueprint variable definition,
    # drafting its type reference and default value.
    def convert(node, doc) do
      drafted_type = Blueprint.Draft.convert(node.type, doc)
      drafted_default = Blueprint.Draft.convert(node.default_value, doc)

      %Blueprint.Document.VariableDefinition{
        name: node.variable.name,
        type: drafted_type,
        default_value: drafted_default,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.FloatValue do
  @moduledoc false

  alias Absinthe.{Blueprint, Language}

  defstruct [
    :value,
    :loc
  ]

  @type t :: %__MODULE__{
          value: float,
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    # Float literals carry over directly; only the location is translated.
    def convert(%{value: value} = node, _doc) do
      %Blueprint.Input.Float{value: value, source_location: source_location(node)}
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.SelectionSet do
  @moduledoc false

  alias Absinthe.Language

  defstruct selections: [],
            loc: %{line: nil}

  @type t :: %__MODULE__{
          selections: [
            Language.FragmentSpread.t() | Language.InlineFragment.t() | Language.Field.t()
          ],
          loc: Language.loc_t()
        }

  defimpl Absinthe.Traversal.Node do
    # A selection set's traversal children are exactly its selections.
    def children(%{selections: selections}, _schema), do: selections
  end
end
defmodule Absinthe.Language.EnumValueDefinition do
  @moduledoc false

  alias Absinthe.{Blueprint, Language}

  @enforce_keys [:value]
  defstruct [
    :value,
    description: nil,
    directives: [],
    loc: %{line: nil, column: nil}
  ]

  @type t :: %__MODULE__{
          value: String.t(),
          description: nil | String.t(),
          directives: [Language.Directive.t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    # Convert this language node into a blueprint enum value definition.
    def convert(node, doc) do
      %Blueprint.Schema.EnumValueDefinition{
        value: node.value,
        # Fix: was `node.value`, which overwrote the definition's description
        # with the enum value itself; the node's own description belongs here.
        description: node.description,
        directives: Absinthe.Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(%{loc: nil}), do: nil
    defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc)
  end
end
defmodule Absinthe.Language.Field do
  @moduledoc false

  alias Absinthe.{Blueprint, Language}

  defstruct alias: nil,
            name: nil,
            arguments: [],
            directives: [],
            selection_set: nil,
            loc: %{line: nil}

  @type t :: %__MODULE__{
          alias: nil | String.t(),
          name: String.t(),
          arguments: [Absinthe.Language.Argument.t()],
          directives: [Absinthe.Language.Directive.t()],
          selection_set: Absinthe.Language.SelectionSet.t(),
          loc: Absinthe.Language.loc_t()
        }

  defimpl Blueprint.Draft do
    # Convert this language field into a blueprint document field, drafting
    # its selections, arguments, and directives.
    def convert(node, doc) do
      draft = &Absinthe.Blueprint.Draft.convert(&1, doc)

      %Blueprint.Document.Field{
        name: node.name,
        alias: node.alias,
        selections: draft.(selections(node.selection_set)),
        arguments: draft.(node.arguments),
        directives: draft.(node.directives),
        source_location: source_location(node)
      }
    end

    defp source_location(%{loc: nil}), do: nil
    defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc)

    # A missing selection set contributes no selections.
    @spec selections(nil | Language.SelectionSet.t()) :: [
            Language.Field.t() | Language.InlineFragment.t() | Language.FragmentSpread.t()
          ]
    defp selections(nil), do: []
    defp selections(%{selections: selections}), do: selections
  end

  defimpl Absinthe.Traversal.Node do
    # Traversal children: arguments, directives, and the selection set
    # (wrapped so a nil selection set contributes nothing).
    def children(node, _schema) do
      Enum.concat([node.arguments, node.directives, List.wrap(node.selection_set)])
    end
  end
end
defmodule Absinthe.Language.Argument do
  @moduledoc false
  # AST node for a named argument supplied to a field or directive.
  alias Absinthe.Blueprint

  defstruct name: nil, value: nil, loc: %{}

  @type t :: %__MODULE__{
          name: String.t(),
          value: %{value: any},
          loc: Absinthe.Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(%{name: name} = node, doc) do
      raw = %Blueprint.Input.RawValue{content: Blueprint.Draft.convert(node.value, doc)}

      %Blueprint.Input.Argument{
        name: name,
        input_value: raw,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end

  defimpl Absinthe.Traversal.Node do
    def children(node, _schema), do: [node.value]
  end
end
defmodule Absinthe.Language.NamedType do
  @moduledoc false
  # AST node for a reference to a type by name.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil, loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    # Location information is not carried over for bare name references.
    def convert(%{name: name}, _doc) do
      %Blueprint.TypeReference.Name{name: name}
    end
  end
end
defmodule Absinthe.Language.FieldDefinition do
  @moduledoc false
  # AST node for a field declared on an object or interface type in the SDL.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            description: nil,
            arguments: [],
            directives: [],
            type: nil,
            loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          arguments: [Language.Argument.t()],
          directives: [Language.Directive.t()],
          type: Language.type_reference_t(),
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.FieldDefinition{
        name: node.name,
        description: node.description,
        # Internal snake_case identifier derived from the SDL name.
        identifier: node.name |> Macro.underscore() |> String.to_atom(),
        arguments: Blueprint.Draft.convert(node.arguments, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        type: Blueprint.Draft.convert(node.type, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.ListValue do
  @moduledoc false
  # AST node for a list literal in a GraphQL document.
  alias Absinthe.{Blueprint, Language}

  defstruct values: [], loc: nil

  @type t :: %__MODULE__{
          values: [Language.value_t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      # Each element is wrapped as a raw input value for later validation.
      items =
        for value <- node.values do
          %Blueprint.Input.RawValue{content: Blueprint.Draft.convert(value, doc)}
        end

      %Blueprint.Input.List{
        items: items,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.EnumValue do
  @moduledoc false
  # AST node for an enum literal in a GraphQL document.
  alias Absinthe.{Blueprint, Language}

  defstruct value: nil, loc: %{line: nil}

  @type t :: %__MODULE__{
          value: any,
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(%{value: value} = node, _doc) do
      %Blueprint.Input.Enum{
        value: value,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.FragmentSpread do
  @moduledoc false
  # AST node for a `...FragmentName` spread inside a selection set.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            directives: [],
            loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          directives: [Language.Directive.t()],
          # Fixed: `loc` is part of the struct but was missing from the type,
          # unlike every sibling language node.
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Document.Fragment.Spread{
        name: node.name,
        directives: Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(%{loc: nil}), do: nil
    defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc)
  end

  defimpl Absinthe.Traversal.Node do
    def children(node, _schema) do
      node.directives
    end
  end
end
defmodule Absinthe.Language.IntValue do
  @moduledoc false
  # AST node for an integer literal.
  alias Absinthe.{Blueprint, Language}

  defstruct [:value, :loc]

  @type t :: %__MODULE__{
          value: integer,
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(%{value: value} = node, _doc) do
      %Blueprint.Input.Integer{
        value: value,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.InputObjectTypeDefinition do
  @moduledoc false
  # AST node for an `input` object type definition in the SDL.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            description: nil,
            fields: [],
            directives: [],
            loc: %{line: nil},
            errors: []

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          fields: [Language.InputValueDefinition.t()],
          directives: [Language.Directive.t()],
          loc: Language.loc_t(),
          # Fixed: `errors` is part of the struct but was missing from the type.
          errors: [term]
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.InputObjectTypeDefinition{
        name: node.name,
        description: node.description,
        # Tag each converted field as an input field definition so later
        # phases place directives correctly.
        fields:
          for value <- Absinthe.Blueprint.Draft.convert(node.fields, doc) do
            %{value | placement: :input_field_definition}
          end,
        directives: Absinthe.Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(%{loc: nil}), do: nil
    defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc)
  end
end
defmodule Absinthe.Language.TypeExtensionDefinition do
  @moduledoc false
  # AST node for a type extension definition in the SDL.
  # NOTE(review): unlike the sibling language nodes in this file, no
  # `Blueprint.Draft` implementation is defined here, so extensions are not
  # lowered to blueprint form by this module.
  alias Absinthe.Language
  defstruct definition: nil,
            loc: %{line: nil}
  @type t :: %__MODULE__{
          definition: Language.ObjectTypeDefinition.t(),
          loc: Language.loc_t()
        }
end
defmodule Absinthe.Language.ObjectTypeDefinition do
  @moduledoc false
  # AST node for a `type` (object) definition in the SDL.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            description: nil,
            directives: [],
            interfaces: [],
            fields: [],
            loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          directives: [Language.Directive.t()],
          interfaces: [Language.NamedType.t()],
          fields: [Language.FieldDefinition.t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.ObjectTypeDefinition{
        name: node.name,
        description: node.description,
        # Internal snake_case identifier derived from the SDL name.
        identifier: node.name |> Macro.underscore() |> String.to_atom(),
        fields: Blueprint.Draft.convert(node.fields, doc),
        interfaces: Blueprint.Draft.convert(node.interfaces, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.ObjectValue do
  @moduledoc false
  # AST node for an input object literal in a GraphQL document.
  alias Absinthe.{Blueprint, Language}

  defstruct fields: [], loc: nil

  @type t :: %__MODULE__{
          fields: [Language.ObjectField.t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Input.Object{
        fields: Blueprint.Draft.convert(node.fields, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.ListType do
  @moduledoc false
  # AST node for a list type reference (`[Type]`).
  alias Absinthe.{Blueprint, Language}

  defstruct type: nil, loc: %{line: nil}

  @type t :: %__MODULE__{
          type: Language.type_reference_t(),
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(%{type: inner}, doc) do
      %Blueprint.TypeReference.List{of_type: Blueprint.Draft.convert(inner, doc)}
    end
  end
end
defmodule Absinthe.Language.StringValue do
  @moduledoc false
  # AST node for a string literal.
  alias Absinthe.{Blueprint, Language}

  defstruct [:value, :loc]

  @type t :: %__MODULE__{
          value: String.t(),
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(%{value: value} = node, _doc) do
      %Blueprint.Input.String{
        value: value,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.Source do
  @moduledoc false
  # A representation of source input to GraphQL, mostly useful for clients
  # who store GraphQL documents in source files; for example, if the GraphQL
  # input is in a file `Foo.graphql`, it might be useful for name to be
  # `"Foo.graphql"`.
  #
  # ## Examples
  #
  # @filename "Foo.graphql"
  # # ...
  # {:ok, data} = File.read(@filename)
  # %Absinthe.Language.Source{body: body, name: @filename}
  # |> Absinthe.run(App.Schema)
  #
  # `name` defaults to "GraphQL" when the source is not tied to a file.
  defstruct body: "",
            name: "GraphQL"
  @type t :: %__MODULE__{
          body: String.t(),
          name: String.t()
        }
end
defmodule Absinthe.Language.ObjectField do
  @moduledoc false
  # AST node for a single `name: value` pair inside an input object literal.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil, value: nil, loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          value: Language.value_t(),
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      raw = %Blueprint.Input.RawValue{content: Blueprint.Draft.convert(node.value, doc)}

      %Blueprint.Input.Field{
        name: node.name,
        input_value: raw,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.OperationDefinition do
  @moduledoc false
  # AST node for a query, mutation, or subscription operation.
  alias Absinthe.{Blueprint, Language}

  defstruct operation: nil,
            name: nil,
            variable_definitions: [],
            directives: [],
            selection_set: nil,
            loc: %{line: nil}

  @type t :: %__MODULE__{
          operation: :query | :mutation | :subscription,
          name: nil | String.t(),
          variable_definitions: [Language.VariableDefinition.t()],
          directives: [Language.Directive.t()],
          selection_set: Language.SelectionSet.t(),
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Document.Operation{
        name: node.name,
        type: node.operation,
        directives: Blueprint.Draft.convert(node.directives, doc),
        variable_definitions: Blueprint.Draft.convert(node.variable_definitions, doc),
        # `selection_set` is accessed unguarded here (unlike `Field`), so it
        # is expected to be present on operations.
        selections: Blueprint.Draft.convert(node.selection_set.selections, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end

  defimpl Absinthe.Traversal.Node do
    def children(node, _schema) do
      Enum.concat([node.variable_definitions, node.directives, List.wrap(node.selection_set)])
    end
  end
end
defmodule Absinthe.Language.NonNullType do
  @moduledoc false
  # AST node for a non-null type reference (`Type!`).
  alias Absinthe.{Blueprint, Language}

  defstruct type: nil,
            loc: %{line: nil}

  @type t :: %__MODULE__{
          type: Language.type_reference_t(),
          # Fixed: was `Language.t()`, which is not the location type; every
          # sibling node declares `loc` as `Language.loc_t()`.
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.TypeReference.NonNull{
        of_type: Blueprint.Draft.convert(node.type, doc)
      }
    end
  end
end
defmodule Absinthe.Language.EnumTypeDefinition do
  @moduledoc false
  # AST node for an `enum` type definition in the SDL.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            description: nil,
            values: [],
            directives: [],
            loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          values: [String.t()],
          directives: [Language.Directive.t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.EnumTypeDefinition{
        name: node.name,
        description: node.description,
        values: Blueprint.Draft.convert(node.values, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.InterfaceTypeDefinition do
  @moduledoc false
  # AST node for an `interface` type definition in the SDL.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            description: nil,
            fields: [],
            directives: [],
            loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          fields: [Language.FieldDefinition.t()],
          directives: [Language.Directive.t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.InterfaceTypeDefinition{
        name: node.name,
        description: node.description,
        fields: Blueprint.Draft.convert(node.fields, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.Variable do
  @moduledoc false
  # AST node for a variable reference (`$name`) in a document.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil, loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(%{name: name} = node, _doc) do
      %Blueprint.Input.Variable{
        name: name,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.Fragment do
  @moduledoc false
  # AST node for a named fragment definition.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            type_condition: nil,
            directives: [],
            selection_set: nil,
            loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          type_condition: nil | Language.NamedType.t(),
          directives: [Language.Directive.t()],
          selection_set: Language.SelectionSet.t(),
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Document.Fragment.Named{
        name: node.name,
        type_condition: Blueprint.Draft.convert(node.type_condition, doc),
        selections: Blueprint.Draft.convert(node.selection_set.selections, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end

  defimpl Absinthe.Traversal.Node do
    def children(node, _schema) do
      Enum.concat([node.directives, List.wrap(node.selection_set)])
    end
  end
end
defmodule Absinthe.Language.InputValueDefinition do
  @moduledoc false
  # AST node for an argument or input-object field declaration in the SDL.
  alias Absinthe.{Blueprint, Language}

  @enforce_keys [:name, :type]
  defstruct [
    :name,
    :type,
    description: nil,
    default_value: nil,
    directives: [],
    loc: %{line: nil}
  ]

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          type: Language.input_t(),
          default_value: Language.input_t(),
          directives: [Language.Directive.t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.InputValueDefinition{
        name: node.name,
        description: node.description,
        type: Blueprint.Draft.convert(node.type, doc),
        # Internal snake_case identifier derived from the SDL name.
        identifier: node.name |> Macro.underscore() |> String.to_atom(),
        default_value: Blueprint.Draft.convert(node.default_value, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.NullValue do
  @moduledoc false
  # AST node for the `null` literal.
  alias Absinthe.{Blueprint, Language}

  defstruct [:loc]

  @type t :: %__MODULE__{
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, _doc) do
      %Blueprint.Input.Null{source_location: source_location(node)}
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.UnionTypeDefinition do
  @moduledoc false
  # AST node for a `union` type definition in the SDL.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            description: nil,
            directives: [],
            types: [],
            loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          directives: [Language.Directive.t()],
          types: [Language.NamedType.t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.UnionTypeDefinition{
        name: node.name,
        description: node.description,
        types: Blueprint.Draft.convert(node.types, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.SchemaDefinition do
  @moduledoc false
  # AST node for the top-level `schema { ... }` definition in the SDL.
  alias Absinthe.{Blueprint, Language}

  defstruct description: nil,
            directives: [],
            fields: [],
            loc: %{line: nil}

  @type t :: %__MODULE__{
          description: nil | String.t(),
          directives: [Language.Directive.t()],
          fields: [Language.FieldDefinition.t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.SchemaDefinition{
        description: node.description,
        # NOTE(review): the parsed `fields` populate the blueprint's
        # `type_definitions` — confirm this mapping is intentional.
        type_definitions: Blueprint.Draft.convert(node.fields, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.Document do
  @moduledoc false
  require Logger
  alias Absinthe.{Blueprint, Language}

  defstruct definitions: [], loc: %{line: nil}

  @typedoc false
  @type t :: %__MODULE__{
          definitions: [Absinthe.Traversal.Node.t()],
          loc: Language.loc_t()
        }

  @doc "Extract a named operation definition from a document"
  @spec get_operation(t, String.t()) :: nil | Absinthe.Language.OperationDefinition.t()
  def get_operation(%{definitions: definitions}, name) do
    Enum.find(definitions, &match?(%Language.OperationDefinition{name: ^name}, &1))
  end

  @doc false
  @spec fragments_by_name(Absinthe.Language.Document.t()) :: %{
          String.t() => Absinthe.Language.Fragment.t()
        }
  def fragments_by_name(%{definitions: definitions}) do
    # Index named fragments by name; any non-fragment definition is skipped.
    for %Language.Fragment{} = fragment <- definitions, into: %{} do
      {fragment.name, fragment}
    end
  end

  defimpl Blueprint.Draft do
    # Each top-level definition is routed into the blueprint list that
    # corresponds to its node type.
    @operations [
      Language.OperationDefinition
    ]
    @types [
      Language.SchemaDefinition,
      Language.EnumTypeDefinition,
      Language.InputObjectTypeDefinition,
      Language.InputValueDefinition,
      Language.InterfaceTypeDefinition,
      Language.ObjectTypeDefinition,
      Language.ScalarTypeDefinition,
      Language.UnionTypeDefinition
    ]
    @directives [
      Language.DirectiveDefinition
    ]
    @fragments [
      Language.Fragment
    ]

    def convert(node, bp) do
      Enum.reduce(node.definitions, bp, &convert_definition(&1, node, &2))
    end

    defp convert_definition(%struct{} = node, doc, blueprint) when struct in @operations do
      update_in(blueprint.operations, &[Blueprint.Draft.convert(node, doc) | &1])
    end

    defp convert_definition(%struct{} = node, doc, blueprint) when struct in @types do
      update_in(blueprint.schema_definitions, &[Blueprint.Draft.convert(node, doc) | &1])
    end

    defp convert_definition(%struct{} = node, doc, blueprint) when struct in @directives do
      update_in(blueprint.directives, &[Blueprint.Draft.convert(node, doc) | &1])
    end

    defp convert_definition(%struct{} = node, doc, blueprint) when struct in @fragments do
      update_in(blueprint.fragments, &[Blueprint.Draft.convert(node, doc) | &1])
    end
  end

  defimpl Absinthe.Traversal.Node do
    def children(%{definitions: definitions}, _schema), do: definitions
  end
end
defmodule Absinthe.Language.Directive do
  @moduledoc false
  # AST node for a directive usage (`@name(args)`) in a document.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            arguments: [],
            loc: nil

  @type t :: %__MODULE__{
          name: String.t(),
          arguments: [Language.Argument],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Directive{
        name: node.name,
        arguments: Blueprint.Draft.convert(node.arguments, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.InlineFragment do
  @moduledoc false
  # AST node for an inline fragment (`... on Type { ... }`).
  alias Absinthe.{Blueprint, Language}

  defstruct type_condition: nil,
            directives: [],
            selection_set: nil,
            loc: %{line: nil}

  @type t :: %__MODULE__{
          type_condition: nil | Language.NamedType.t(),
          directives: [Language.Directive.t()],
          selection_set: Language.SelectionSet.t(),
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Document.Fragment.Inline{
        type_condition: Blueprint.Draft.convert(node.type_condition, doc),
        selections: Blueprint.Draft.convert(node.selection_set.selections, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end

  defimpl Absinthe.Traversal.Node do
    def children(node, _schema) do
      Enum.concat([
        List.wrap(node.type_condition),
        node.directives,
        List.wrap(node.selection_set)
      ])
    end
  end
end
defmodule Absinthe.Language.DirectiveDefinition do
  @moduledoc false
  # AST node for a `directive @name on LOCATIONS` definition in the SDL.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            description: nil,
            arguments: [],
            directives: [],
            locations: [],
            loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          directives: [Language.Directive.t()],
          arguments: [Language.Argument.t()],
          locations: [String.t()],
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.DirectiveDefinition{
        name: node.name,
        description: node.description,
        arguments: Blueprint.Draft.convert(node.arguments, doc),
        directives: Blueprint.Draft.convert(node.directives, doc),
        # Locations are carried over as-is; no conversion is applied.
        locations: node.locations,
        source_location: source_location(node)
      }
    end

    defp source_location(node) do
      case node.loc do
        nil -> nil
        loc -> Blueprint.SourceLocation.at(loc)
      end
    end
  end
end
defmodule Absinthe.Language.ScalarTypeDefinition do
  @moduledoc false
  # AST node for a `scalar` type definition in the SDL.
  alias Absinthe.{Blueprint, Language}

  defstruct name: nil,
            description: nil,
            directives: [],
            loc: %{line: nil}

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          directives: [Language.Directive.t()],
          # Fixed: was `Language.t()`, which is not the location type; every
          # sibling node declares `loc` as `Language.loc_t()`.
          loc: Language.loc_t()
        }

  defimpl Blueprint.Draft do
    def convert(node, doc) do
      %Blueprint.Schema.ScalarTypeDefinition{
        name: node.name,
        description: node.description,
        directives: Absinthe.Blueprint.Draft.convert(node.directives, doc),
        source_location: source_location(node)
      }
    end

    defp source_location(%{loc: nil}), do: nil
    defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc)
  end
end
defmodule Absinthe.Adapter.Underscore do
  @moduledoc """
  Underscores incoming (external) names and passes outgoing names through
  unchanged. Unlike the `Absinthe.Adapter.Passthrough` this does not break
  introspection (because introspection relies on underscoring incoming
  introspection queries, which we still do).
  """
  use Absinthe.Adapter

  # Nothing to translate.
  def to_internal_name(nil, _role) do
    nil
  end

  # Introspection names keep their `__` prefix; the rest is converted.
  def to_internal_name("__" <> camelized_name, role) do
    "__" <> to_internal_name(camelized_name, role)
  end

  def to_internal_name(camelized_name, _role) do
    Macro.underscore(camelized_name)
  end

  # Outgoing names are passed through untouched.
  def to_external_name(name, _role) do
    name
  end
end
defmodule Absinthe.Adapter.Passthrough do
  @moduledoc """
  The default adapter, which makes no changes to incoming query document
  ASTs or outgoing results.
  """
  use Absinthe.Adapter

  # Documents pass through untouched.
  def load_document(document), do: document

  # Results pass through untouched.
  def dump_results(results), do: results
end
defmodule Absinthe.Adapter.LanguageConventions do
  use Absinthe.Adapter
  alias Absinthe.Utils
  @moduledoc """
  This defines an adapter that supports GraphQL query documents in their
  conventional (in JS) camelcase notation, while allowing the schema to be
  defined using conventional (in Elixir) underscore (snakecase) notation, and
  transforming the names as needed for lookups, results, and error messages.
  For example, this document:
  ```
  {
    myUser: createUser(userId: 2) {
      firstName
      lastName
    }
  }
  ```
  Would map to an internal schema that used the following names:
  * `create_user` instead of `createUser`
  * `user_id` instead of `userId`
  * `first_name` instead of `firstName`
  * `last_name` instead of `lastName`
  Likewise, the result of executing this (camelcase) query document against our
  (snakecase) schema would have its names transformed back into camelcase on the
  way out:
  ```
  %{
    data: %{
      "myUser" => %{
        "firstName" => "Joe",
        "lastName" => "Black"
      }
    }
  }
  ```
  Note variables are a client-facing concern (they may be provided as
  parameters), so variable names should match the convention of the query
  document (eg, camelCase).
  """
  @doc "Converts a camelCase to snake_case"
  def to_internal_name(nil, _role) do
    nil
  end
  # Introspection names keep their `__` prefix; the remainder is converted
  # recursively. Clause order matters: this must precede the catch-all below.
  def to_internal_name("__" <> camelized_name, role) do
    "__" <> to_internal_name(camelized_name, role)
  end
  # Operation names are client-facing, so they are passed through untouched.
  def to_internal_name(camelized_name, :operation) do
    camelized_name
  end
  def to_internal_name(camelized_name, _role) do
    camelized_name
    |> Macro.underscore()
  end
  @doc "Converts a snake_case name to camelCase"
  def to_external_name(nil, _role) do
    nil
  end
  # Introspection names keep their `__` prefix on the way out as well.
  def to_external_name("__" <> underscored_name, role) do
    "__" <> to_external_name(underscored_name, role)
  end
  # Names that already begin with an uppercase letter are camelized without
  # forcing a lowercase first character (no `lower: true` option).
  def to_external_name(<<c::utf8, _::binary>> = name, _) when c in ?A..?Z do
    name |> Utils.camelize()
  end
  def to_external_name(underscored_name, _role) do
    underscored_name
    |> Utils.camelize(lower: true)
  end
end
defmodule Absinthe.Introspection.Field do
  @moduledoc false
  use Absinthe.Schema.Notation
  alias Absinthe.Schema
  alias Absinthe.Type
  # Builds the `__typename` meta field. Resolves to the concrete type's name;
  # for interfaces and unions, the concrete type is determined via the
  # abstract type's `resolve_type`, and failure to resolve is an error.
  def meta("typename") do
    %Type.Field{
      name: "__typename",
      type: :string,
      description: "The name of the object type currently being queried.",
      middleware: [
        Absinthe.Resolution.resolver_spec(fn
          _, %{parent_type: %Type.Object{} = type} ->
            {:ok, type.name}
          _, %{source: source, parent_type: %Type.Interface{} = iface} = env ->
            case Type.Interface.resolve_type(iface, source, env) do
              nil ->
                {:error, "Could not resolve type of concrete " <> iface.name}
              type ->
                {:ok, type.name}
            end
          _, %{source: source, parent_type: %Type.Union{} = union} = env ->
            case Type.Union.resolve_type(union, source, env) do
              nil ->
                {:error, "Could not resolve type of concrete " <> union.name}
              type ->
                {:ok, type.name}
            end
        end)
      ]
    }
  end
  # Builds the `__type` meta field: looks up a single type by (required) name
  # in the schema.
  def meta("type") do
    %Type.Field{
      name: "__type",
      type: :__type,
      description: "Represents scalars, interfaces, object types, unions, enums in the system",
      args: %{
        name: %Type.Argument{
          identifier: :name,
          name: "name",
          type: %Absinthe.Type.NonNull{of_type: :string},
          description: "The name of the type to introspect"
        }
      },
      middleware: [
        Absinthe.Resolution.resolver_spec(fn %{name: name}, %{schema: schema} ->
          {:ok, Schema.lookup_type(schema, name)}
        end)
      ]
    }
  end
  # Builds the `__schema` meta field: resolves to the schema itself.
  def meta("schema") do
    %Type.Field{
      name: "__schema",
      type: :__schema,
      description: "Represents the schema",
      middleware: [
        Absinthe.Resolution.resolver_spec(fn _, %{schema: schema} ->
          {:ok, schema}
        end)
      ]
    }
  end
end
defmodule Absinthe.Introspection.Kind do
  @moduledoc false
  # Provides a default, overridable `kind/0` (the GraphQL introspection kind
  # string) derived from the using module's name, e.g. `ObjectType` ->
  # `"OBJECT_TYPE"`.
  defmacro __using__(_opts) do
    quote do
      @behaviour unquote(__MODULE__)

      def kind do
        __MODULE__
        |> Module.split()
        |> List.last()
        |> Absinthe.Introspection.Kind.upcase()
      end

      defoverridable kind: 0
    end
  end

  @doc false
  # Split a CamelCase name into its capitalized words, upcase each, and join
  # with underscores.
  def upcase(name) do
    ~r{[A-Z]+[a-z]+}
    |> Regex.scan(name)
    |> List.flatten()
    |> Enum.map_join("_", &String.upcase/1)
  end

  @callback kind :: binary
end
defmodule Absinthe.Subscription do
  @moduledoc """
  Real time updates via GraphQL
  For a how to guide on getting started with Absinthe.Subscriptions in your phoenix
  project see the Absinthe.Phoenix package.
  Define in your schema via `Absinthe.Schema.subscription/2`
  ## Basic Usage
  ## Performance Characteristics
  There are a couple of limitations to the beta release of subscriptions that
  are worth keeping in mind if you want to use this in production:
  By design, all subscription docs triggered by a mutation are run inside the
  mutation process as a form of back pressure.
  At the moment however database batching does not happen across the set of
  subscription docs. Thus if you have a lot of subscription docs and they each
  do a lot of extra DB lookups you're going to delay incoming mutation responses
  by however long it takes to do all that work.
  Before the final version of 1.4.0 we want
  - Batching across subscriptions
  - More user control over back pressure / async balance.
  """
  require Logger
  alias __MODULE__
  @doc """
  Add Absinthe.Subscription to your process tree.
  """
  defdelegate start_link(pubsub), to: Subscription.Supervisor
  # Supervisor child spec so this can be started as
  # `{Absinthe.Subscription, pubsub}` under a supervision tree.
  def child_spec(pubsub) do
    %{
      id: __MODULE__,
      start: {Subscription.Supervisor, :start_link, [pubsub]},
      type: :supervisor
    }
  end
  @type subscription_field_spec :: {atom, term | (term -> term)}
  @doc """
  Publish a mutation
  This function is generally used when trying to publish to one or more subscription
  fields "out of band" from any particular mutation.
  ## Examples
  Note: As with all subscription examples if you're using Absinthe.Phoenix `pubsub`
  will be `MyApp.Web.Endpoint`.
  ```
  Absinthe.Subscription.publish(pubsub, user, [new_users: user.account_id])
  ```
  ```
  # publish to two subscription fields
  Absinthe.Subscription.publish(pubsub, user, [
    new_users: user.account_id,
    other_user_subscription_field: user.id,
  ])
  ```
  """
  @spec publish(
          Absinthe.Subscription.Pubsub.t(),
          term,
          Absinthe.Resolution.t() | [subscription_field_spec]
        ) :: :ok
  # Given a resolution struct, derive the subscribed fields from the
  # mutation's triggers, then publish as usual.
  def publish(pubsub, mutation_result, %Absinthe.Resolution{} = info) do
    subscribed_fields = get_subscription_fields(info)
    publish(pubsub, mutation_result, subscribed_fields)
  end
  # Publish both to remote nodes (via the proxy topics) and to subscribers
  # local to this node.
  def publish(pubsub, mutation_result, subscribed_fields) do
    _ = publish_remote(pubsub, mutation_result, subscribed_fields)
    _ = Subscription.Local.publish_mutation(pubsub, mutation_result, subscribed_fields)
    :ok
  end
  # Subscription fields that list the resolved mutation among their
  # `triggers`, each paired with that mutation's topic configuration.
  defp get_subscription_fields(resolution_info) do
    mutation_field = resolution_info.definition.schema_node
    schema = resolution_info.schema
    subscription = Absinthe.Schema.lookup_type(schema, :subscription) || %{fields: []}
    subscription_fields = Map.take(subscription.fields, mutation_field.triggers)
    for {sub_field_id, sub_field} <- subscription_fields do
      triggers = Absinthe.Type.function(sub_field, :triggers)
      config = Map.fetch!(triggers, mutation_field.identifier)
      {sub_field_id, config}
    end
  end
  @doc false
  # Register the calling process under the field topic; a second registration
  # keyed by {pid, doc_id} lets `unsubscribe/2` find the field keys later.
  def subscribe(pubsub, field_key, doc_id, doc) do
    registry = pubsub |> registry_name
    {:ok, _} = Registry.register(registry, field_key, {doc_id, doc})
    {:ok, _} = Registry.register(registry, {self(), doc_id}, field_key)
  end
  @doc false
  # Remove every registration made by `subscribe/4` for this doc id.
  def unsubscribe(pubsub, doc_id) do
    registry = pubsub |> registry_name
    self = self()
    for {^self, field_key} <- Registry.lookup(registry, {self, doc_id}) do
      Registry.unregister_match(registry, field_key, {doc_id, :_})
    end
    Registry.unregister(registry, {self, doc_id})
    :ok
  end
  @doc false
  # Map of doc_id => document for all current subscribers to the given key.
  def get(pubsub, key) do
    pubsub
    |> registry_name
    |> Registry.lookup(key)
    |> Enum.map(&elem(&1, 1))
    |> Map.new()
  end
  @doc false
  def registry_name(pubsub) do
    Module.concat([pubsub, :Registry])
  end
  @doc false
  # Fan the mutation out to other nodes via the proxy topics; the shard is
  # chosen by hashing the mutation result over the configured pool size.
  def publish_remote(pubsub, mutation_result, subscribed_fields) do
    {:ok, pool_size} =
      pubsub
      |> registry_name
      |> Registry.meta(:pool_size)
    shard = :erlang.phash2(mutation_result, pool_size)
    proxy_topic = Subscription.Proxy.topic(shard)
    :ok = pubsub.publish_mutation(proxy_topic, mutation_result, subscribed_fields)
  end
  ## Middleware callback
  @doc false
  # Publish only successful (resolved, error-free) mutation results, and only
  # when a pubsub is present in the context and its registry is running.
  def call(%{state: :resolved, errors: [], value: value} = res, _) do
    with {:ok, pubsub} <- extract_pubsub(res.context) do
      __MODULE__.publish(pubsub, value, res)
    end
    res
  end
  def call(res, _), do: res
  @doc false
  # Succeeds only if the context carries a `:pubsub` whose registry process
  # is currently alive.
  def extract_pubsub(context) do
    with {:ok, pubsub} <- Map.fetch(context, :pubsub),
         pid when is_pid(pid) <- Process.whereis(registry_name(pubsub)) do
      {:ok, pubsub}
    else
      _ -> :error
    end
  end
  @doc false
  # Append this module as middleware to every mutation field so results are
  # published automatically after resolution; other types are untouched.
  def add_middleware(%{identifier: :mutation} = node) do
    Map.update!(node, :fields, fn fields ->
      for {ident, field} <- fields, into: %{} do
        field = Map.update!(field, :middleware, &(&1 ++ [{__MODULE__, []}]))
        {ident, field}
      end
    end)
  end
  def add_middleware(type), do: type
end
defmodule Absinthe.Subscription.Local do
  @moduledoc false
  require Logger
  alias Absinthe.Pipeline.BatchResolver
  # This module handles running and broadcasting documents that are local to this
  # node.
  # Collect every locally-subscribed document for the given fields (with the
  # mutation result installed as the root value), group the documents by
  # execution context, and run/broadcast each group.
  def publish_mutation(pubsub, mutation_result, subscribed_fields) do
    docs_and_topics =
      for {field, key_strategy} <- subscribed_fields,
          {topic, doc} <- get_docs(pubsub, field, mutation_result, key_strategy) do
        {{topic, {field, key_strategy}}, put_in(doc.execution.root_value, mutation_result)}
      end
    docs_by_context = group_by_context(docs_and_topics)
    for docset <- docs_by_context do
      run_docset(pubsub, docset)
    end
  end
  # Documents sharing an execution context can be batch-resolved together.
  defp group_by_context(docs_and_topics) do
    docs_and_topics
    |> Enum.group_by(fn {_, doc} -> doc.execution.context end)
    |> Map.values()
  end
  # Batch-resolve the docs, then render and publish each document's result.
  # A failure while rendering/publishing one document is reported via
  # `BatchResolver.pipeline_error/2` without aborting the rest of the set.
  defp run_docset(pubsub, docs_and_topics) do
    {topics, docs} = Enum.unzip(docs_and_topics)
    docs = BatchResolver.run(docs, schema: hd(docs).schema, abort_on_error: false)
    pipeline = [
      Absinthe.Phase.Document.Result
    ]
    for {doc, {topic, key_strategy}} <- Enum.zip(docs, topics), doc != :error do
      try do
        {:ok, %{result: data}, _} = Absinthe.Pipeline.run(doc, pipeline)
        Logger.debug("""
        Absinthe Subscription Publication
        Field Topic: #{inspect(key_strategy)}
        Subscription id: #{inspect(topic)}
        Data: #{inspect(data)}
        """)
        :ok = pubsub.publish_subscription(topic, data)
      rescue
        e ->
          # NOTE(review): System.stacktrace/0 is deprecated on Elixir >= 1.7;
          # __STACKTRACE__ is the modern equivalent (kept for 1.4 support).
          BatchResolver.pipeline_error(e, System.stacktrace())
      end
    end
  end
  # A `topic:` function strategy derives the topic key(s) from the mutation
  # result at publish time...
  defp get_docs(pubsub, field, mutation_result, topic: topic_fun)
       when is_function(topic_fun, 1) do
    do_get_docs(pubsub, field, topic_fun.(mutation_result))
  end
  # ...any other strategy is used directly as the key (or list of keys).
  defp get_docs(pubsub, field, _mutation_result, key) do
    do_get_docs(pubsub, field, key)
  end
  # Keys are normalized to strings before looking up subscribed documents.
  defp do_get_docs(pubsub, field, keys) do
    keys
    |> List.wrap()
    |> Enum.map(&to_string/1)
    |> Enum.flat_map(&Absinthe.Subscription.get(pubsub, {field, &1}))
  end
end
defmodule Absinthe.Subscription.Proxy do
  @moduledoc false

  use GenServer

  defstruct [
    :pubsub
  ]

  alias Absinthe.Subscription

  # One proxy process per shard; each subscribes to its shard's well-known
  # topic and fans incoming mutation broadcasts out to local subscribers.
  def start_link(pubsub, shard) do
    GenServer.start_link(__MODULE__, {pubsub, shard})
  end

  # Well-known topic name for a shard (see Subscription.publish_remote/3).
  def topic(shard), do: "__absinthe__:proxy:#{shard}"

  def init({pubsub, shard}) do
    :ok = pubsub.subscribe(topic(shard))
    {:ok, %__MODULE__{pubsub: pubsub}}
  end

  # Broadcasts that originated on this node are ignored — the payload carries
  # its source node (see Absinthe.Subscription.Pubsub), presumably because the
  # originating node already ran its local subscriptions.
  def handle_info(%{node: src_node}, state) when src_node == node() do
    {:noreply, state}
  end

  def handle_info(payload, state) do
    # There's no meaningful form of backpressure to have here, and we can't
    # bottleneck execution inside each proxy process
    # TODO: This should maybe be supervised? I feel like the linking here isn't
    # what it should be.
    Task.start_link(fn ->
      Subscription.Local.publish_mutation(
        state.pubsub,
        payload.mutation_result,
        payload.subscribed_fields
      )
    end)

    {:noreply, state}
  end
end
defmodule Absinthe.Subscription.Supervisor do
  @moduledoc false

  use Supervisor

  def start_link(pubsub, pool_size \\ System.schedulers_online() * 2) do
    Supervisor.start_link(__MODULE__, {pubsub, pool_size})
  end

  def init({pubsub, pool_size}) do
    registry_name = Absinthe.Subscription.registry_name(pubsub)
    # Stash the pool size as Registry metadata so that
    # Absinthe.Subscription.publish_remote/3 can read it back when sharding.
    meta = [pool_size: pool_size]

    children = [
      # :duplicate — many subscribers may register under the same field key.
      supervisor(Registry, [
        :duplicate,
        registry_name,
        [partitions: System.schedulers_online(), meta: meta]
      ]),
      supervisor(Absinthe.Subscription.ProxySupervisor, [pubsub, registry_name, pool_size])
    ]

    # NOTE(review): supervisor/2-3 and supervise/2 come from Supervisor.Spec,
    # deprecated on Elixir >= 1.5 — kept for the project's ~> 1.4 floor.
    supervise(children, strategy: :one_for_one)
  end
end
defmodule Absinthe.Subscription.ProxySupervisor do
  @moduledoc false

  use Supervisor

  # Boots `pool_size` Absinthe.Subscription.Proxy workers for `pubsub`.
  # `registry` is accepted for call-site compatibility but is unused here.
  def start_link(pubsub, registry, pool_size) do
    Supervisor.start_link(__MODULE__, {pubsub, registry, pool_size})
  end

  def init({pubsub, _registry, pool_size}) do
    # Shards are addressed 0..pool_size-1 because :erlang.phash2/2 is 0-based.
    children =
      Enum.map(0..(pool_size - 1), fn shard ->
        worker(Absinthe.Subscription.Proxy, [pubsub, shard], id: shard)
      end)

    supervise(children, strategy: :one_for_one)
  end
end
defmodule Absinthe.Subscription.Pubsub do
  @moduledoc """
  Pubsub behaviour expected by Absinthe to power subscriptions

  A subscription includes a GraphQL query document that resolves to a set of
  objects and fields. When the subscription is triggered, Absinthe will run the
  document and publish the resolved objects to subscribers through a module that
  implements the behaviour defined here.

  Each application is free to implement the PubSub behavior in its own way.
  For example, the absinthe_phoenix project implements the subscription pubsub
  using Phoenix.PubSub by way of the application's Endpoint. Regardless
  of the underlying mechanisms, the implementation should maintain the type
  signatures and expected behaviors of the callbacks below.
  """

  @type t :: module()

  @doc """
  Subscribe the current process for messages about the given topic.
  """
  @callback subscribe(topic :: binary) :: term

  @doc """
  An Absinthe.Subscription.Pubsub system may extend across multiple nodes in a
  cluster. Processes need only subscribe to the pubsub process that
  is running on their own node.

  However, mutations can happen on any node in the cluster and must be
  broadcast to other nodes so that they can also reevaluate their GraphQL
  subscriptions and notify subscribers on that node.

  When told of a mutation, Absinthe invokes the `publish_mutation` function
  on the node in which the mutation is processed first. The function should
  publish a message to the given `proxy_topic`, with the identity of node
  on which the mutation occurred included in the broadcast message.

  The message broadcast should be a map that contains, at least

      %{
        node: node_id, # probably from Kernel.node/0
        mutation_result: …, # from arguments
        subscribed_fields: … # from arguments

        # other fields as needed
      }
  """
  @callback publish_mutation(
              proxy_topic :: binary,
              mutation_result :: term,
              subscribed_fields :: list
            ) :: term

  @doc """
  After a mutation is published, and Absinthe has re-run the necessary GraphQL
  subscriptions to generate a new set of resolved data, it calls
  `publish_subscription`.

  Your pubsub implementation should publish a message to the given topic, with
  the newly resolved data. The broadcast should be limited to the current node
  only.
  """
  @callback publish_subscription(topic :: binary, data :: map) :: term
end
defmodule Absinthe.Schema.Notation do
alias Absinthe.Blueprint.Schema
alias Absinthe.Utils
Module.register_attribute(__MODULE__, :placement, accumulate: true)
defmacro __using__(_opts) do
  # Accumulating attributes that collect the blueprint definition stream and
  # pending descriptions as the schema module compiles.
  Module.register_attribute(__CALLER__.module, :absinthe_blueprint, accumulate: true)
  Module.register_attribute(__CALLER__.module, :absinthe_desc, accumulate: true)
  # Seed the stream with the root blueprint for the schema module.
  put_attr(__CALLER__.module, %Absinthe.Blueprint{schema: __CALLER__.module})

  quote do
    import Absinthe.Resolution.Helpers,
      only: [
        async: 1,
        async: 2,
        batch: 3,
        batch: 4
      ]

    Module.register_attribute(__MODULE__, :__absinthe_type_import__, accumulate: true)
    @desc nil
    import unquote(__MODULE__), only: :macros
    @before_compile unquote(__MODULE__)
  end
end
### Macro API ###

@placement {:config, [under: [:field]]}
@doc """
Configure a subscription field.

## Example

```elixir
config fn args, %{context: context} ->
  if authorized?(context) do
    {:ok, topic: args.client_id}
  else
    {:error, "unauthorized"}
  end
end
```

See `Absinthe.Schema.subscription/1` for details
"""
defmacro config(config_fun) do
  env = recordable!(__CALLER__, :config, @placement[:config])
  record_config!(env, config_fun)
end
@placement {:trigger, [under: [:field]]}
@doc """
Set a trigger for a subscription field.

It accepts one or more mutation field names, and can be called more than once.

```
mutation do
  field :gps_event, :gps_event
  field :user_checkin, :user
end

subscription do
  field :location_update, :user do
    arg :user_id, non_null(:id)

    config fn args, _ ->
      {:ok, topic: args.user_id}
    end

    trigger :gps_event, topic: fn event ->
      event.user_id
    end

    trigger :user_checkin, topic: fn user ->
      [user.id, user.parent_id]
    end
  end
end
```

Trigger functions are only called once per event, so database calls within
them do not present a significant burden.

See the `subscription/2` macro docs for additional details
"""
defmacro trigger(mutations, attrs) do
  env = recordable!(__CALLER__, :trigger, @placement[:trigger])
  record_trigger!(env, List.wrap(mutations), attrs)
end
# OBJECT
@placement {:object, [toplevel: true]}
@doc """
Define an object type.
Adds an `Absinthe.Type.Object` to your schema.
## Placement
#{Utils.placement_docs(@placement)}
## Examples
Basic definition:
```
object :car do
  # ...
end
```
Providing a custom name:
```
object :car, name: "CarType" do
  # ...
end
```
"""
@reserved_identifiers ~w(query mutation subscription)a
defmacro object(identifier, attrs \\ [], block)

# `query`, `mutation` and `subscription` root objects have dedicated macros;
# reject attempts to define them through `object/3`.
defmacro object(identifier, _attrs, _block) when identifier in @reserved_identifiers do
  raise Absinthe.Schema.Notation.Error,
        "Invalid schema notation: cannot create an `object` with reserved identifier `#{
          identifier
        }`"
end

defmacro object(identifier, attrs, do: block) do
  # A `meta:` keyword attribute is rewritten into a `meta ...` call prepended
  # to the do-block, so it is recorded exactly like an explicit `meta` call.
  {attrs, block} =
    case Keyword.pop(attrs, :meta) do
      {nil, attrs} ->
        {attrs, block}

      {meta, attrs} ->
        meta_ast =
          quote do
            meta unquote(meta)
          end

        block = [meta_ast, block]
        {attrs, block}
    end

  __CALLER__
  |> recordable!(:object, @placement[:object])
  |> record!(Schema.ObjectTypeDefinition, identifier, attrs, block)
end
@placement {:interfaces, [under: :object]}
@doc """
Declare implemented interfaces for an object.

See also `interface/1`, which can be used for one interface,
and `interface/3`, used to define interfaces themselves.

## Placement

#{Utils.placement_docs(@placement)}

## Examples

```
object :car do
  interfaces [:vehicle, :branded]
  # ...
end
```
"""
defmacro interfaces(ifaces) when is_list(ifaces) do
  env = recordable!(__CALLER__, :interfaces, @placement[:interfaces])
  record_interfaces!(env, ifaces)
end
# Fix: this placement was previously registered under the `:resolve` key,
# which made `@placement[:deprecate]` nil below and silently skipped
# placement validation for the `deprecate` macro.
@placement {:deprecate, [under: [:field]]}
@doc """
Mark a field as deprecated

In most cases you can simply pass the deprecate: "message" attribute. However
when using the block form of a field it can be nice to also use this macro.

## Placement

#{Utils.placement_docs(@placement)}

## Examples
```
field :foo, :string do
  deprecate "Foo will no longer be supported"
end
```

This is how to deprecate other things
```
field :foo, :string do
  arg :bar, :integer, deprecate: "This isn't supported either"
end

enum :colors do
  value :red
  value :blue, deprecate: "This isn't supported"
end
```
"""
defmacro deprecate(msg) do
  __CALLER__
  |> recordable!(:deprecate, @placement[:deprecate])
  |> record_deprecate!(msg)
end
@doc """
Declare an implemented interface for an object.
Adds an `Absinthe.Type.Interface` to your schema.
See also `interfaces/1`, which can be used for multiple interfaces,
and `interface/3`, used to define interfaces themselves.
## Examples
```
object :car do
interface :vehicle
# ...
end
```
"""
@placement {:interface_attribute, [under: :object]}
defmacro interface(identifier) do
__CALLER__
|> recordable!(
:interface_attribute,
@placement[:interface_attribute],
as: "`interface` (as an attribute)"
)
|> record_interface!(identifier)
end
# INTERFACES
@placement {:interface, [toplevel: true]}
@doc """
Define an interface type.

Adds an `Absinthe.Type.Interface` to your schema.

Also see `interface/1` and `interfaces/1`, which declare
that an object implements one or more interfaces.

## Placement

#{Utils.placement_docs(@placement)}

## Examples

```
interface :vehicle do
  field :wheel_count, :integer
end

object :rally_car do
  field :wheel_count, :integer
  interface :vehicle
end
```
"""
defmacro interface(identifier, attrs \\ [], do: block) do
  env = recordable!(__CALLER__, :interface, @placement[:interface])
  record!(env, Schema.InterfaceTypeDefinition, identifier, attrs, block)
end
@placement {:resolve_type, [under: [:interface, :union]]}
@doc """
Define a type resolver for a union or interface.

See also:
* `Absinthe.Type.Interface`
* `Absinthe.Type.Union`

## Placement

#{Utils.placement_docs(@placement)}

## Examples

```
interface :entity do
  # ...
  resolve_type fn
    %{employee_count: _},  _ ->
      :business
    %{age: _}, _ ->
      :person
  end
end
```
"""
defmacro resolve_type(func_ast) do
  env = recordable!(__CALLER__, :resolve_type, @placement[:resolve_type])
  record_resolve_type!(env, func_ast)
end
# Splits macro-level field attrs into {struct attrs, body block}: each
# `:args` entry becomes a generated `arg ...` call and a `:resolve` value
# becomes a `resolve ...` call, both prepended to the field's do-block.
defp handle_field_attrs(attrs, caller) do
  block =
    for {identifier, arg_attrs} <- Keyword.get(attrs, :args, []) do
      quote do
        arg unquote(identifier), unquote(arg_attrs)
      end
    end

  {func_ast, attrs} = Keyword.pop(attrs, :resolve)

  # The resolve call (if any) is placed ahead of the generated arg calls.
  block =
    if func_ast do
      [
        quote do
          resolve unquote(func_ast)
        end
      ]
    else
      []
    end ++ block

  attrs =
    attrs
    |> expand_ast(caller)
    |> Keyword.delete(:args)
    |> handle_deprecate

  {attrs, block}
end
# Convert a `:deprecate` option into a `:deprecation` struct entry.
defp handle_deprecate(attrs) do
  {msg, attrs} = Keyword.pop(attrs, :deprecate)
  Keyword.put(attrs, :deprecation, build_deprecation(msg))
end

# `true` deprecates without a reason; a binary supplies the reason;
# anything else (including nil) means "not deprecated".
defp build_deprecation(true), do: %Absinthe.Type.Deprecation{reason: nil}

defp build_deprecation(reason) when is_binary(reason),
  do: %Absinthe.Type.Deprecation{reason: reason}

defp build_deprecation(_), do: nil
# FIELDS
@placement {:field, [under: [:input_object, :interface, :object]]}
@doc """
Defines a GraphQL field
See `field/4`
"""
defmacro field(identifier, attrs) when is_list(attrs) do
  {attrs, block} = handle_field_attrs(attrs, __CALLER__)

  __CALLER__
  |> recordable!(:field, @placement[:field])
  |> record!(Schema.FieldDefinition, identifier, attrs, block)
end

defmacro field(identifier, type) do
  {attrs, block} = handle_field_attrs([type: type], __CALLER__)

  __CALLER__
  |> recordable!(:field, @placement[:field])
  |> record!(Schema.FieldDefinition, identifier, attrs, block)
end

@doc """
Defines a GraphQL field
See `field/4`
"""
defmacro field(identifier, attrs, do: block) when is_list(attrs) do
  # Generated resolve/arg calls are prepended to the user-supplied block.
  {attrs, more_block} = handle_field_attrs(attrs, __CALLER__)
  block = more_block ++ block

  __CALLER__
  |> recordable!(:field, @placement[:field])
  |> record!(Schema.FieldDefinition, identifier, attrs, block)
end

defmacro field(identifier, type, do: block) do
  # NOTE(review): the generated block is discarded here, unlike the clause
  # above. With only `[type: type]` given, handle_field_attrs/2 produces no
  # resolve or arg statements, so there is nothing to prepend — but verify if
  # handle_field_attrs/2 ever grows new block-producing options.
  {attrs, _} = handle_field_attrs([type: type], __CALLER__)

  __CALLER__
  |> recordable!(:field, @placement[:field])
  |> record!(Schema.FieldDefinition, identifier, attrs, block)
end

defmacro field(identifier, type, attrs) do
  {attrs, block} = handle_field_attrs(Keyword.put(attrs, :type, type), __CALLER__)

  __CALLER__
  |> recordable!(:field, @placement[:field])
  |> record!(Schema.FieldDefinition, identifier, attrs, block)
end

@doc """
Defines a GraphQL field.
## Placement
#{Utils.placement_docs(@placement)}
`query`, `mutation`, and `subscription` are
all objects under the covers, and thus you'll find `field` definitions under
those as well.
## Examples
```
field :id, :id
field :age, :integer, description: "How old the item is"
field :name, :string do
  description "The name of the item"
end
field :location, type: :location
```
"""
defmacro field(identifier, type, attrs, do: block) do
  __CALLER__
  |> recordable!(:field, @placement[:field])
  |> record!(Schema.FieldDefinition, identifier, Keyword.put(attrs, :type, type), block)
end
@placement {:resolve, [under: [:field]]}
@doc """
Defines a resolve function for a field

Specify a 2 or 3 arity function to call when resolving a field.

You can either hard code a particular anonymous function, or have a function
call that returns a 2 or 3 arity anonymous function. See examples for more information.

Note that when using a hard coded anonymous function, the function will not
capture local variables.

### 3 Arity Functions

The first argument to the function is the parent entity.
```
{
  user(id: 1) {
    name
  }
}
```
A resolution function on the `name` field would have the result of the `user(id: 1)` field
as its first argument. Top level fields have the `root_value` as their first argument.
Unless otherwise specified, this defaults to an empty map.

The second argument to the resolution function is the field arguments. The final
argument is an `Absinthe.Resolution` struct, which includes information like
the `context` and other execution data.

### 2 Arity Function

Exactly the same as the 3 arity version, but without the first argument (the parent entity)

## Placement

#{Utils.placement_docs(@placement)}

## Examples
```
query do
  field :person, :person do
    resolve &Person.resolve/2
  end
end
```

```
query do
  field :person, :person do
    resolve fn %{id: id}, _ ->
      {:ok, Person.find(id)}
    end
  end
end
```

```
query do
  field :person, :person do
    resolve lookup(:person)
  end
end

def lookup(:person) do
  fn %{id: id}, _ ->
    {:ok, Person.find(id)}
  end
end
```
"""
defmacro resolve(func_ast) do
  # Placement validation only — the actual recording happens through the
  # `middleware` call in the expanded code below.
  recordable!(__CALLER__, :resolve, @placement[:resolve])

  quote do
    middleware Absinthe.Resolution, unquote(func_ast)
  end
end
@placement {:complexity, [under: [:field]]}
# Define a complexity analyzer for the current field.
defmacro complexity(func_ast) do
  env = recordable!(__CALLER__, :complexity, @placement[:complexity])
  record_complexity!(env, func_ast)
end
@placement {:middleware, [under: [:field]]}
# Append a middleware spec to the current field's middleware stack.
defmacro middleware(new_middleware, opts \\ []) do
  env = recordable!(__CALLER__, :middleware, @placement[:middleware])
  record_middleware!(env, new_middleware, opts)
end
@placement {:is_type_of, [under: [:object]]}
@doc """
## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro is_type_of(func_ast) do
  env = recordable!(__CALLER__, :is_type_of, @placement[:is_type_of])
  record_is_type_of!(env, func_ast)
end
@placement {:arg, [under: [:directive, :field]]}
# ARGS
@doc """
Add an argument.

## Placement

#{Utils.placement_docs(@placement)}

## Examples

```
field do
  arg :size, :integer
  arg :name, :string, description: "The desired name"
end
```
"""
defmacro arg(identifier, type, attrs) do
  env = recordable!(__CALLER__, :arg, @placement[:arg])
  record_arg!(env, identifier, expand_ast(Keyword.put(attrs, :type, type), __CALLER__))
end

@doc """
Add an argument.

See `arg/3`
"""
defmacro arg(identifier, attrs) when is_list(attrs) do
  env = recordable!(__CALLER__, :arg, @placement[:arg])
  record_arg!(env, identifier, expand_ast(attrs, __CALLER__))
end

defmacro arg(identifier, type) do
  env = recordable!(__CALLER__, :arg, @placement[:arg])
  record_arg!(env, identifier, expand_ast([type: type], __CALLER__))
end
# SCALARS
@placement {:scalar, [toplevel: true]}
@doc """
Define a scalar type

A scalar type requires `parse/1` and `serialize/1` functions.

## Placement

#{Utils.placement_docs(@placement)}

## Examples
```
scalar :time, description: "ISOz time" do
  parse &Timex.parse(&1.value, "{ISOz}")
  serialize &Timex.format!(&1, "{ISOz}")
end
```
"""
defmacro scalar(identifier, attrs, do: block) do
  env = recordable!(__CALLER__, :scalar, @placement[:scalar])
  record!(env, Schema.ScalarTypeDefinition, identifier, attrs, block)
end

@doc """
Defines a scalar type

See `scalar/3`
"""
defmacro scalar(identifier, do: block) do
  env = recordable!(__CALLER__, :scalar, @placement[:scalar])
  record!(env, Schema.ScalarTypeDefinition, identifier, [], block)
end

defmacro scalar(identifier, attrs) do
  env = recordable!(__CALLER__, :scalar, @placement[:scalar])
  record!(env, Schema.ScalarTypeDefinition, identifier, attrs, nil)
end
@placement {:serialize, [under: [:scalar]]}
@doc """
Defines a serialization function for a `scalar` type

The specified `serialize` function is used on outgoing data. It should simply
return the desired external representation.

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro serialize(func_ast) do
  env = recordable!(__CALLER__, :serialize, @placement[:serialize])
  record_serialize!(env, func_ast)
end
@placement {:private,
            [under: [:field, :object, :input_object, :enum, :scalar, :interface, :union]]}
@doc false
# Record a single private key/value pair under `owner` for the current scope.
defmacro private(owner, key, value) do
  env = recordable!(__CALLER__, :private, @placement[:private])
  record_private!(env, owner, [{key, value}])
end
@placement {:meta,
            [under: [:field, :object, :input_object, :enum, :scalar, :interface, :union]]}
@doc """
Defines a metadata key/value pair for a custom type.

For more info see `meta/1`

### Examples

```
meta :cache, false
```

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro meta(key, value) do
  env = recordable!(__CALLER__, :meta, @placement[:meta])
  record_private!(env, :meta, [{key, value}])
end
@doc """
Defines list of metadata's key/value pair for a custom type.
This is generally used to facilitate libraries that want to augment Absinthe
functionality
## Examples
```
object :user do
meta cache: true, ttl: 22_000
end
object :user, meta: [cache: true, ttl: 22_000] do
# ...
end
```
The meta can be accessed via the `Absinthe.Type.meta/2` function.
```
user_type = Absinthe.Schema.lookup_type(MyApp.Schema, :user)
Absinthe.Type.meta(user_type, :cache)
#=> true
Absinthe.Type.meta(user_type)
#=> [cache: true, ttl: 22_000]
```
## Placement
#{Utils.placement_docs(@placement)}
"""
defmacro meta(keyword_list) do
__CALLER__
|> recordable!(:meta, @placement[:meta])
|> record_private!(:meta, keyword_list)
end
@placement {:parse, [under: [:scalar]]}
@doc """
Defines a parse function for a `scalar` type

The specified `parse` function is used on incoming data to transform it into
an elixir datastructure.

It should return `{:ok, value}` or `:error`

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro parse(func_ast) do
  env = recordable!(__CALLER__, :parse, @placement[:parse])
  record_parse!(env, func_ast)
end
# DIRECTIVES
@placement {:directive, [toplevel: true]}
@doc """
Defines a directive

## Placement

#{Utils.placement_docs(@placement)}

## Examples

```
directive :mydirective do

  arg :if, non_null(:boolean), description: "Skipped when true."

  on [:field, :fragment_spread, :inline_fragment]

  expand fn
    %{if: true}, node ->
      Blueprint.put_flag(node, :skip, __MODULE__)
    _, node ->
      node
  end

end
```
"""
defmacro directive(identifier, attrs \\ [], do: block) do
  env = recordable!(__CALLER__, :directive, @placement[:directive])
  record_directive!(env, identifier, attrs, block)
end
@placement {:on, [under: :directive]}
@doc """
Declare a directive as operating an a AST node type

See `directive/2`

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro on(ast_node) do
  env = recordable!(__CALLER__, :on, @placement[:on])
  record_locations!(env, ast_node)
end
@placement {:expand, [under: :directive]}
@doc """
Define the expansion for a directive

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro expand(func_ast) do
  env = recordable!(__CALLER__, :expand, @placement[:expand])
  record_expand!(env, func_ast)
end
# INPUT OBJECTS
@placement {:input_object, [toplevel: true]}
@doc """
Defines an input object

See `Absinthe.Type.InputObject`

## Placement

#{Utils.placement_docs(@placement)}

## Examples
```
input_object :contact_input do
  field :email, non_null(:string)
end
```
"""
defmacro input_object(identifier, attrs \\ [], do: block) do
  env = recordable!(__CALLER__, :input_object, @placement[:input_object])
  record!(env, Schema.InputObjectTypeDefinition, identifier, attrs, block)
end
# UNIONS
@placement {:union, [toplevel: true]}
@doc """
Defines a union type

See `Absinthe.Type.Union`

## Placement

#{Utils.placement_docs(@placement)}

## Examples
```
union :search_result do
  description "A search result"

  types [:person, :business]
  resolve_type fn
    %Person{}, _ -> :person
    %Business{}, _ -> :business
  end
end
```
"""
defmacro union(identifier, attrs \\ [], do: block) do
  env = recordable!(__CALLER__, :union, @placement[:union])
  record!(env, Schema.UnionTypeDefinition, identifier, attrs, block)
end
@placement {:types, [under: [:union]]}
@doc """
Defines the types possible under a union type

See `union/3`

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro types(types) do
  env = recordable!(__CALLER__, :types, @placement[:types])
  record_types!(env, types)
end
# ENUMS
@placement {:enum, [toplevel: true]}
@doc """
Defines an enum type

## Placement

#{Utils.placement_docs(@placement)}

## Examples

Handling `RED`, `GREEN`, `BLUE` values from the query document:

```
enum :color do
  value :red
  value :green
  value :blue
end
```

A given query document might look like:

```graphql
{
  foo(color: RED)
}
```

Internally you would get an argument in elixir that looks like:

```elixir
%{color: :red}
```

If your return value is an enum, it will get serialized out as:

```json
{"color": "RED"}
```

You can provide custom value mappings. Here we use `r`, `g`, `b` values:

```
enum :color do
  value :red, as: "r"
  value :green, as: "g"
  value :blue, as: "b"
end
```
"""
defmacro enum(identifier, attrs, do: block) do
  # Normalize attrs (builds EnumValueDefinition structs) before validating
  # placement, matching the original evaluation order.
  enum_attrs = handle_enum_attrs(attrs, __CALLER__)
  env = recordable!(__CALLER__, :enum, @placement[:enum])
  record!(env, Schema.EnumTypeDefinition, identifier, enum_attrs, block)
end

@doc """
Defines an enum type

See `enum/3`
"""
defmacro enum(identifier, do: block) do
  env = recordable!(__CALLER__, :enum, @placement[:enum])
  record!(env, Schema.EnumTypeDefinition, identifier, [], block)
end

defmacro enum(identifier, attrs) do
  enum_attrs = handle_enum_attrs(attrs, __CALLER__)
  env = recordable!(__CALLER__, :enum, @placement[:enum])
  record!(env, Schema.EnumTypeDefinition, identifier, enum_attrs, [])
end
# Expand macro-time AST in enum attrs and turn each entry of `:values` into
# an EnumValueDefinition struct.
defp handle_enum_attrs(attrs, env) do
  attrs
  |> expand_ast(env)
  |> Keyword.update(:values, [], fn idents ->
    for ident <- idents do
      struct!(
        Schema.EnumValueDefinition,
        handle_enum_value_attrs(ident, module: env.module)
      )
    end
  end)
end
@placement {:value, [under: [:enum]]}
@doc """
Defines a value possible under an enum type

See `enum/3`

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro value(identifier, raw_attrs \\ []) do
  env = recordable!(__CALLER__, :value, @placement[:value])
  record_value!(env, identifier, raw_attrs)
end
# GENERAL ATTRIBUTES
@placement {:description, [toplevel: false]}
@doc """
Defines a description

This macro adds a description to any other macro which takes a block.

Note that you can also specify a description by using `@desc` above any item
that can take a description attribute.

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro description(text) do
  env = recordable!(__CALLER__, :description, @placement[:description])
  record_description!(env, text)
end
# TYPE UTILITIES
@doc """
Marks a type reference as non null
See `field/3` for examples
"""
# Note: these macros expand directly to a struct value (treated as a literal
# by the compiler) rather than to quoted code.
defmacro non_null(type) do
  %Absinthe.Blueprint.TypeReference.NonNull{of_type: expand_ast(type, __CALLER__)}
end

@doc """
Marks a type reference as a list of the given type
See `field/3` for examples
"""
defmacro list_of(type) do
  %Absinthe.Blueprint.TypeReference.List{of_type: expand_ast(type, __CALLER__)}
end
@placement {:import_fields, [under: [:input_object, :interface, :object]]}
@doc """
Import fields from another object

## Example
```
object :news_queries do
  field :all_links, list_of(:link)
  field :main_story, :link
end

object :admin_queries do
  field :users, list_of(:user)
  field :pending_posts, list_of(:post)
end

query do
  import_fields :news_queries
  import_fields :admin_queries
end
```

Import fields can also be used on objects created inside other modules that you
have used import_types on.

```
defmodule MyApp.Schema.NewsTypes do
  use Absinthe.Schema.Notation

  object :news_queries do
    field :all_links, list_of(:link)
    field :main_story, :link
  end
end
defmodule MyApp.Schema.Schema do
  use Absinthe.Schema

  import_types MyApp.Schema.NewsTypes

  query do
    import_fields :news_queries
    # ...
  end
end
```
"""
defmacro import_fields(source_criteria, opts \\ []) do
  criteria = expand_ast(source_criteria, __CALLER__)
  put_attr(__CALLER__.module, {:import_fields, {criteria, opts}})
end
@placement {:import_types, [toplevel: true]}
@doc """
Import types from another module

Very frequently your schema module will simply have the `query` and `mutation`
blocks, and you'll want to break out your other types into other modules. This
macro imports those types for use the current module

## Placement

#{Utils.placement_docs(@placement)}

## Examples
```
import_types MyApp.Schema.Types

import_types MyApp.Schema.Types.{TypesA, TypesB}
```
"""
defmacro import_types(type_module_ast, opts \\ []) do
  env = __CALLER__
  do_import_types(Macro.expand(type_module_ast, env), env, opts)
end
@placement {:import_sdl, [toplevel: true]}
@doc """
Import types defined using the Schema Definition Language (SDL).

TODO: Explain handlers

## Placement

#{Utils.placement_docs(@placement)}

## Examples

Directly embedded SDL:

```
import_sdl \"""
type Query {
  posts: [Post]
}

type Post {
  title: String!
  body: String!
}
\"""
```

Loaded from a file location (supporting recompilation on change):

```
import_sdl path: "/path/to/sdl.graphql"
```

TODO: Example for dynamic loading during init
"""
defmacro import_sdl(embedded \\ nil, opts \\ []) do
  do_import_sdl(__CALLER__, embedded, opts)
end
# Define several enum values at once (bare form used inside `enum` blocks).
defmacro values(values) do
  record_values!(__CALLER__, values)
end
### Recorders ###
#################

# Definition node types that open a new scope when recorded.
@scoped_types [
  Schema.ObjectTypeDefinition,
  Schema.FieldDefinition,
  Schema.ScalarTypeDefinition,
  Schema.EnumTypeDefinition,
  Schema.EnumValueDefinition,
  Schema.InputObjectTypeDefinition,
  Schema.UnionTypeDefinition,
  Schema.InterfaceTypeDefinition,
  Schema.DirectiveDefinition
]

# Record a scoped type definition: expand macro-time AST in `attrs`, then emit
# the definition (plus its body and a closing marker) via scoped_def/5.
def record!(env, type, identifier, attrs, block) when type in @scoped_types do
  attrs = expand_ast(attrs, env)
  scoped_def(env, type, identifier, attrs, block)
end
# Build an InputValueDefinition struct for an argument, normalizing
# deprecation info and filling in identifier/name.
defp build_arg(identifier, attrs) do
  fields =
    attrs
    |> handle_deprecate()
    |> Keyword.put(:identifier, identifier)
    |> Keyword.put(:name, to_string(identifier))

  struct!(Schema.InputValueDefinition, fields)
end

# Record an argument definition into the blueprint attribute stream.
def record_arg!(env, identifier, attrs) do
  attrs = Keyword.put(attrs, :module, env.module)
  put_attr(env.module, build_arg(identifier, attrs))
end
@doc false
# Record a directive expand function in the current scope
def record_expand!(env, func_ast), do: put_attr(env.module, {:expand, func_ast})

@doc false
# Record directive AST nodes in the current scope
def record_locations!(env, locations) do
  put_attr(env.module, {:locations, expand_ast(locations, env)})
end

@doc false
# Record a directive definition (opens a new scope)
def record_directive!(env, identifier, attrs, block) do
  attrs =
    Keyword.put_new(
      Keyword.put(attrs, :identifier, identifier),
      :name,
      to_string(identifier)
    )

  scoped_def(env, Schema.DirectiveDefinition, identifier, attrs, block)
end

@doc false
# Record a parse function in the current scope
def record_parse!(env, fun_ast), do: put_attr(env.module, {:parse, fun_ast})

@doc false
# Record private values under `owner` in the current scope
def record_private!(env, owner, keyword_list) when is_list(keyword_list) do
  expanded = expand_ast(keyword_list, env)
  put_attr(env.module, {:__private__, [{owner, expanded}]})
end

@doc false
# Record a serialize function in the current scope
def record_serialize!(env, fun_ast), do: put_attr(env.module, {:serialize, fun_ast})

@doc false
# Record a type checker in the current scope
def record_is_type_of!(env, func_ast), do: put_attr(env.module, {:is_type_of, func_ast})

@doc false
# Record a complexity analyzer in the current scope
def record_complexity!(env, func_ast), do: put_attr(env.module, {:complexity, func_ast})

@doc false
# Record a type resolver in the current scope
def record_resolve_type!(env, func_ast), do: put_attr(env.module, {:resolve_type, func_ast})
@doc false
# Record an implemented interface in the current scope
def record_interface!(env, identifier), do: put_attr(env.module, {:interface, identifier})

@doc false
# Record a deprecation in the current scope
def record_deprecate!(env, msg) do
  deprecation = msg |> expand_ast(env) |> build_deprecation()
  put_attr(env.module, {:deprecation, deprecation})
end

@doc false
# Record a list of implemented interfaces in the current scope
def record_interfaces!(env, ifaces) do
  Enum.each(ifaces, fn iface -> record_interface!(env, iface) end)
end

@doc false
# Record a list of member types for a union in the current scope
def record_types!(env, types), do: put_attr(env.module, {:types, types})

@doc false
# Record an enum type (opens a new scope)
def record_enum!(env, identifier, attrs, block) do
  attrs = Keyword.put(expand_ast(attrs, env), :identifier, identifier)
  scoped_def(env, :enum, identifier, attrs, block)
end
# Descriptions are stored with surrounding whitespace removed.
defp reformat_description(text) do
  String.trim(text)
end

@doc false
# Record a description in the current scope
def record_description!(env, text_block) do
  put_attr(env.module, {:desc, reformat_description(text_block)})
end
# Normalize enum-value attributes: set the identifier, derive the external
# value (the `:as` option, defaulting to the identifier) and an upcased
# default name, and convert any `:deprecate` option into a deprecation record.
def handle_enum_value_attrs(identifier, raw_attrs) do
  raw_attrs
  # NOTE(review): the second argument here is the attrs themselves, not a
  # macro env — every other call site passes `expand_ast(ast, env)`. Looks
  # suspicious; verify against expand_ast/2's expectations.
  |> expand_ast(raw_attrs)
  |> Keyword.put(:identifier, identifier)
  |> Keyword.put(:value, Keyword.get(raw_attrs, :as, identifier))
  |> Keyword.put_new(:name, String.upcase(to_string(identifier)))
  |> Keyword.delete(:as)
  |> handle_deprecate
end
@doc false
# Record a single enum value in the current scope as an
# `EnumValueDefinition`, after normalizing its attributes.
def record_value!(env, identifier, raw_attrs) do
attrs = handle_enum_value_attrs(identifier, raw_attrs)
record!(env, Schema.EnumValueDefinition, identifier, attrs, [])
end
@doc false
# Record a list of enum values (the shorthand `values [...]` form) in the
# current scope. Each identifier becomes an `EnumValueDefinition` struct;
# the whole list is stored under the `:values` tag.
def record_values!(env, values) do
values =
values
|> expand_ast(env)
|> Enum.map(fn ident ->
value_attrs = handle_enum_value_attrs(ident, module: env.module)
struct!(Schema.EnumValueDefinition, value_attrs)
end)
put_attr(env.module, {:values, values})
end
# Record a subscription `config` function AST in the current scope.
def record_config!(env, fun_ast) do
put_attr(env.module, {:config, fun_ast})
end
# Record subscription triggers: one `{mutation, attrs}` entry per mutation
# identifier in `mutations`.
def record_trigger!(env, mutations, attrs) do
for mutation <- mutations do
put_attr(env.module, {:trigger, {mutation, attrs}})
end
end
# Record a middleware entry in the current scope, normalizing the spec:
# * `{module, fun}` tuple            -> `{{module, fun}, opts}`
# * module atom (starts "Elixir.")   -> `{{module, :call}, opts}`
# * bare atom (local function name)  -> `{{env.module, atom}, opts}`
# * anything else is stored as-is
# The `{:{}, [], [...]}` wrappers build escaped 3-tuple AST nodes, since
# this runs at macro-expansion time.
def record_middleware!(env, new_middleware, opts) do
new_middleware =
case expand_ast(new_middleware, env) do
{module, fun} ->
{:{}, [], [{module, fun}, opts]}
atom when is_atom(atom) ->
case Atom.to_string(atom) do
"Elixir." <> _ ->
{:{}, [], [{atom, :call}, opts]}
_ ->
{:{}, [], [{env.module, atom}, opts]}
end
val ->
val
end
put_attr(env.module, {:middleware, [new_middleware]})
end
# ------------------------------
# Close the currently open definition scope by recording a `:close` marker
# in the blueprint attribute stream (consumed by `__before_compile__/1`).
defmacro close_scope() do
put_attr(__CALLER__.module, :close)
end
# Attach a `:__reference__` entry to `attrs`, capturing the defining
# module, the definition's identifier, and the source location (file and
# line) taken from `env`.
def put_reference(attrs, env, identifier) do
  location = %{file: env.file, line: env.line}

  reference = %{
    module: env.module,
    identifier: identifier,
    location: location
  }

  Keyword.put(attrs, :__reference__, reference)
end
# Open a scoped type definition: build the definition struct (with default
# name and `__reference__` metadata), record it, and return an AST that
# also fetches any pending description, evaluates `body`, and finally
# closes the scope.
defp scoped_def(caller, type, identifier, attrs, body) do
attrs =
attrs
|> Keyword.put(:identifier, identifier)
|> Keyword.put_new(:name, default_name(type, identifier))
|> Keyword.put(:module, caller.module)
|> put_reference(caller, identifier)
definition = struct!(type, attrs)
ref = put_attr(caller.module, definition)
[
get_desc(ref),
body,
quote(do: unquote(__MODULE__).close_scope())
]
end
# AST that associates the module's pending `@desc` with this definition ref
# at compile time (via `put_desc/2`).
defp get_desc(ref) do
quote do
unquote(__MODULE__).put_desc(__MODULE__, unquote(ref))
end
end
# Append `{ref, thing}` to the `:absinthe_blueprint` module attribute and
# return the unique ref.
# NOTE(review): assumes `:absinthe_blueprint` is registered as an
# accumulating attribute in `__using__` — confirm; `__before_compile__/1`
# reads it back as a list.
defp put_attr(module, thing) do
ref = :erlang.unique_integer()
Module.put_attribute(module, :absinthe_blueprint, {ref, thing})
ref
end
# Default external name for a definition, derived from its identifier.
# Field definitions keep the raw atom string (no camelization here).
defp default_name(Schema.FieldDefinition, identifier) do
identifier
|> Atom.to_string()
end
# All other definition kinds default to a camelized name.
defp default_name(_, identifier) do
identifier
|> Atom.to_string()
|> Absinthe.Utils.camelize()
end
# Handle the multi-alias form `import_types Root.{A, B}`: resolve the root
# alias (honoring any `alias` in the caller's env) and import each leaf
# module, raising if one of them cannot be loaded.
defp do_import_types({{:., _, [root_ast, :{}]}, _, modules_ast_list}, env, opts) do
{:__aliases__, _, root} = root_ast
root_module = Module.concat(root)
root_module_with_alias = Keyword.get(env.aliases, root_module, root_module)
for {_, _, leaf} <- modules_ast_list do
type_module = Module.concat([root_module_with_alias | leaf])
if Code.ensure_loaded?(type_module) do
do_import_types(type_module, env, opts)
else
raise ArgumentError, "module #{type_module} is not available"
end
end
end
# Base case: prepend `{module, opts}` to the accumulated list of type
# imports on the calling module.
defp do_import_types(module, env, opts) do
Module.put_attribute(env.module, :__absinthe_type_imports__, [
{module, opts} | Module.get_attribute(env.module, :__absinthe_type_imports__) || []
])
[]
end
# Import schema definitions from SDL.
#
# First clause: no inline SDL was given, so a `:path` option is required.
# The file is read, registered as an `@external_resource` (so the module
# recompiles when the file changes), and its contents are handed to the
# binary clause below.
defp do_import_sdl(env, nil, opts) do
  with {:ok, path} <- Keyword.fetch(opts, :path),
       sdl <- File.read!(path) do
    Module.put_attribute(env.module, :external_resource, path)
    # Recurse with the loaded SDL string. (This previously called
    # `do_import_types/3`, which passed the SDL binary where a `Macro.Env`
    # was expected and crashed on `env.module`.)
    do_import_sdl(env, sdl, Keyword.delete(opts, :path))
  else
    :error ->
      raise Absinthe.Schema.Notation.Error,
            "Must provide `:path` option to `import_sdl` unless passing a raw SDL string as the first argument"
  end
end

# Second clause: parse the SDL string and accumulate the resulting type
# definitions in the `:__absinthe_sdl_definitions__` module attribute.
defp do_import_sdl(env, sdl, _opts) when is_binary(sdl) do
  with {:ok, definitions} <- __MODULE__.SDL.parse(sdl, env.module) do
    Module.put_attribute(
      env.module,
      :__absinthe_sdl_definitions__,
      definitions ++ (Module.get_attribute(env.module, :__absinthe_sdl_definitions__) || [])
    )

    []
  else
    {:error, error} ->
      raise Absinthe.Schema.Notation.Error, "`import_sdl` could not parse SDL:\n#{error}"
  end
end
# Move the module's pending `@desc` attribute into `@absinthe_desc`, keyed
# by the definition ref it belongs to, then clear `@desc` so it cannot leak
# onto the next definition.
def put_desc(module, ref) do
Module.put_attribute(module, :absinthe_desc, {ref, Module.get_attribute(module, :desc)})
Module.put_attribute(module, :desc, nil)
end
# Intentional no-op: referenced from generated code to "consume" `@desc`
# so the compiler does not warn about an unused module attribute.
def noop(_desc), do: :ok
# Assemble the schema blueprint from everything recorded during compilation
# and inject `__absinthe_blueprint__/0` (plus lifted function clauses) into
# the schema module.
defmacro __before_compile__(env) do
# Map of definition ref => recorded description (from `put_desc/2`).
module_attribute_descs =
env.module
|> Module.get_attribute(:absinthe_desc)
|> Map.new()
# The blueprint attribute stream, re-ordered oldest-first with `:desc`
# entries spliced in before the definitions they describe.
attrs =
env.module
|> Module.get_attribute(:absinthe_blueprint)
|> List.insert_at(0, :close)
|> reverse_with_descs(module_attribute_descs)
imports =
(Module.get_attribute(env.module, :__absinthe_type_imports__) || [])
|> Enum.uniq()
|> Enum.map(fn
module when is_atom(module) -> {module, []}
other -> other
end)
schema_def = %Schema.SchemaDefinition{
imports: imports,
module: env.module
}
blueprint =
attrs
|> List.insert_at(1, schema_def)
|> Absinthe.Blueprint.Schema.build()
# TODO: handle multiple schemas
[schema] = blueprint.schema_definitions
# Pull anonymous functions out of the blueprint so it can be escaped;
# they are re-emitted as `__absinthe_function__/2` clauses below.
{schema, functions} = lift_functions(schema, env.module)
# SDL-derived definitions are re-homed onto this module before merging.
sdl_definitions =
(Module.get_attribute(env.module, :__absinthe_sdl_definitions__) || [])
|> List.flatten()
|> Enum.map(fn type_definition ->
Absinthe.Blueprint.prewalk(type_definition, fn
%{module: _} = node ->
%{node | module: env.module}
node ->
node
end)
end)
schema = Map.update!(schema, :type_definitions, &(sdl_definitions ++ &1))
blueprint = %{blueprint | schema_definitions: [schema]}
quote do
unquote(__MODULE__).noop(@desc)
def __absinthe_blueprint__ do
unquote(Macro.escape(blueprint))
end
unquote_splicing(functions)
end
end
# Walk the schema blueprint and lift embedded functions out of every node,
# returning `{schema, function_asts}`.
def lift_functions(schema, origin) do
Absinthe.Blueprint.prewalk(schema, [], &lift_functions(&1, &2, origin))
end
# Per-node step for the prewalk above: accumulate generated function ASTs.
def lift_functions(node, acc, origin) do
{node, ast} = functions_for_type(node, origin)
{node, ast ++ acc}
end
# For each listed attribute of `type`, emit an `__absinthe_function__/2`
# clause returning the attribute's current value, and replace the value in
# the struct with a `{:ref, origin, identifier}` pointer to that clause.
def grab_functions(origin, type, identifier, attrs) do
{ast, type} =
Enum.flat_map_reduce(attrs, type, fn attr, type ->
value = Map.fetch!(type, attr)
ast =
quote do
def __absinthe_function__(unquote(identifier), unquote(attr)) do
unquote(value)
end
end
type = %{type | attr => {:ref, origin, identifier}}
{[ast], type}
end)
{type, ast}
end
# Field definitions are keyed by their `function_ref` (fields are not
# uniquely identified by name alone).
defp functions_for_type(%Schema.FieldDefinition{} = type, origin) do
grab_functions(
origin,
type,
{Schema.FieldDefinition, type.function_ref},
Schema.functions(Schema.FieldDefinition)
)
end
# Any other definition struct is keyed by `{module, identifier}`.
defp functions_for_type(%module{identifier: identifier} = type, origin) do
grab_functions(origin, type, {module, identifier}, Schema.functions(module))
end
# Non-definition nodes carry no functions to lift.
defp functions_for_type(type, _) do
{type, []}
end
@doc false
# Guarantee every field resolves through at least one middleware entry.
#
# With no middleware configured:
#   * subscription fields pass the parent value straight through;
#   * all other fields default to fetching the field's key from the parent.
# A non-empty middleware list is returned untouched.
def __ensure_middleware__([], _field, %{identifier: :subscription}),
  do: [Absinthe.Middleware.PassParent]

def __ensure_middleware__([], %{identifier: identifier}, _object),
  do: [{Absinthe.Middleware.MapGet, identifier}]

def __ensure_middleware__(middleware, _field, _object), do: middleware
# Reverse the (newest-first) attribute stream, splicing any recorded
# description in as a `{:desc, text}` entry immediately before the
# definition it refers to (matched by ref).
defp reverse_with_descs(attrs, descs, acc \\ [])

defp reverse_with_descs([], _descs, acc), do: acc

defp reverse_with_descs([head | tail], descs, acc) do
  case head do
    {ref, attr} ->
      case Map.get(descs, ref) do
        nil -> reverse_with_descs(tail, descs, [attr | acc])
        desc -> reverse_with_descs(tail, descs, [attr, {:desc, desc} | acc])
      end

    attr ->
      reverse_with_descs(tail, descs, [attr | acc])
  end
end
# Macro-expand every 3-tuple AST node within `ast` against `env`, leaving
# all other nodes untouched.
defp expand_ast(ast, env) do
Macro.prewalk(ast, fn
{_, _, _} = node ->
Macro.expand(node, env)
node ->
node
end)
end
@doc false
# Ensure the provided operation can be recorded in the current environment,
# in the current scope context.
# Currently a pass-through (both arities simply return `env`); validation
# hooks can be reinstated here without changing callers.
def recordable!(env, _usage) do
env
end
def recordable!(env, _usage, _kw_rules, _opts \\ []) do
env
end
end
defmodule Absinthe.Schema.Error do
  @moduledoc """
  Exception raised when a schema is invalid
  """

  defexception message: "Invalid schema", details: []

  @type detail_t :: %{
          rule: Absinthe.Schema.Rule.t(),
          location: %{file: binary, line: pos_integer},
          data: any
        }

  @doc """
  Build the exception from a list of error details, rendering each detail
  into the message (one per line).
  """
  def exception(details) do
    # map_join avoids building an intermediate list just to join it.
    detail = Enum.map_join(details, "\n", &format_detail/1)
    %__MODULE__{message: "Invalid schema:\n" <> detail <> "\n", details: details}
  end

  @doc """
  Render a single error detail as `file:line: explanation`, delegating the
  explanation text to the detail's rule module.
  """
  def format_detail(detail) do
    explanation = indent(detail.rule.explanation(detail))
    "#{detail.location.file}:#{detail.location.line}: #{explanation}\n"
  end

  # Trim the explanation, prefix each line, and strip the leading
  # whitespace of the first line so it lines up after the `file:line:`
  # prefix.
  defp indent(text) do
    text
    |> String.trim()
    |> String.split("\n")
    |> Enum.map_join("\n", &" #{&1}")
    |> String.trim_leading()
  end
end
defmodule Absinthe.Schema.Verification do
@moduledoc false
alias __MODULE__
alias Absinthe.Traversal
alias Absinthe.Schema
# Walk the schema collecting naming errors, then run the union checks.
# Only runs when the schema has no pre-existing errors.
@spec setup(Schema.t()) :: Schema.t()
def setup(%{errors: []} = schema) do
errors = Traversal.reduce(schema, schema, schema.errors, &collect_errors/3)
%{schema | errors: errors}
|> Verification.Unions.check()
end
# A schema that already has errors is returned unchanged.
def setup(schema) do
schema
end
# Don't allow anything named with a __ prefix
@spec collect_errors(Traversal.Node.t(), Traversal.t(), [binary]) :: Traversal.instruction_t()
defp collect_errors(%{__struct__: definition_type, name: "__" <> name}, traversal, errs) do
# e.g. `Absinthe.Type.Object` -> "Object" for the error message.
definition_name = definition_type |> Module.split() |> List.last()
errs = [format_error(:double_underscore, %{definition: definition_name, name: name}) | errs]
{:ok, errs, traversal}
end
# No-op
defp collect_errors(_node, traversal, errs) do
{:ok, errs, traversal}
end
defp format_error(:double_underscore, %{definition: definition, name: name}) do
"#{definition} `__#{name}': Must not define any types, fields, arguments, or any other type system artifact with two leading underscores."
end
end
defmodule Absinthe.Schema.Rule.TypeNamesAreUnique do
use Absinthe.Schema.Rule
@moduledoc false
@description """
References to types must be unique.
> All types within a GraphQL schema must have unique names. No two provided
> types may have the same name. No provided type may have a name which
> conflicts with any built in types (including Scalar and Introspection
> types).
Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#type-system
"""
# Human-readable explanation for a duplicate-name error detail.
def explanation(%{data: %{artifact: artifact, value: name}}) do
"""
#{artifact} #{inspect(name)} is not unique.
#{@description}
"""
end
# This rule is only used for its explanation. Error details are added during
# compilation.
def check(_), do: []
end
defmodule Absinthe.Schema.Rule.ObjectInterfacesMustBeValid do
use Absinthe.Schema.Rule
alias Absinthe.Schema
alias Absinthe.Type
@moduledoc false
@description """
Only interfaces may be present in an Object's interface list.
Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#interfaces
"""
def explanation(%{data: %{object: obj, interface: interface}}) do
"""
Type "#{obj}" cannot implement non-interface type "#{interface}"
#{@description}
"""
end
# Examine every type in the schema; only types with an `:interfaces` field
# are actually checked (see the clauses below).
def check(schema) do
Schema.types(schema)
|> Enum.flat_map(&check_type(schema, &1))
end
# Resolve each referenced interface and report every one that is not an
# actual `Type.Interface`. Raises if a referenced identifier does not
# resolve to any type at all.
defp check_type(schema, %{interfaces: ifaces} = type) do
ifaces
|> Enum.map(&Schema.lookup_type(schema, &1))
|> Enum.reduce([], fn
nil, _ ->
raise "No type found in #{inspect(ifaces)}"
%Type.Interface{}, acc ->
acc
iface_type, acc ->
[
report(type.__reference__.location, %{object: type.name, interface: iface_type.name})
| acc
]
end)
end
# Types without an interface list have nothing to validate.
defp check_type(_, _) do
[]
end
end
defmodule Absinthe.Schema.Rule.ObjectMustImplementInterfaces do
use Absinthe.Schema.Rule
alias Absinthe.Schema
alias Absinthe.Type
@moduledoc false
@description """
An object type must be a super-set of all interfaces it implements.
* The object type must include a field of the same name for every field
defined in an interface.
* The object field must be of a type which is equal to or a sub-type of
the interface field (covariant).
* An object field type is a valid sub-type if it is equal to (the same
type as) the interface field type.
* An object field type is a valid sub-type if it is an Object type and the
interface field type is either an Interface type or a Union type and the
object field type is a possible type of the interface field type.
* An object field type is a valid sub-type if it is a List type and the
interface field type is also a List type and the list-item type of the
object field type is a valid sub-type of the list-item type of the
interface field type.
* An object field type is a valid sub-type if it is a Non-Null variant of a
valid sub-type of the interface field type.
* The object field must include an argument of the same name for every
argument defined in the interface field.
* The object field argument must accept the same type (invariant) as the
interface field argument.
* The object field may include additional arguments not defined in the
interface field, but any additional argument must not be required.
Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#object-type-validation
"""
def explanation(%{data: %{object: obj, interface: interface}}) do
"""
Type "#{obj}" does not fully implement interface type "#{interface}"
#{@description}
"""
end
# Examine every type in the schema; only types with an `:interfaces` field
# are actually checked (see the clauses below).
def check(schema) do
schema
|> Schema.types()
|> Enum.flat_map(&check_type(schema, &1))
end
# Report each implemented interface whose contract (per
# `Type.Interface.implements?/3`) this type does not satisfy.
defp check_type(schema, %{interfaces: ifaces} = type) do
ifaces
|> Enum.map(&Schema.lookup_type(schema, &1))
|> Enum.reduce([], fn
%Type.Interface{} = iface_type, acc ->
if Type.Interface.implements?(iface_type, type, schema) do
acc
else
[
report(type.__reference__.location, %{object: type.name, interface: iface_type.name})
| acc
]
end
_, _ ->
# Handles by a different rule
[]
end)
end
# Types without an interface list have nothing to validate.
defp check_type(_, _) do
[]
end
end
defmodule Absinthe.Schema.Rule.QueryTypeMustBeObject do
  use Absinthe.Schema.Rule
  alias Absinthe.Schema

  # NOTE: an unused `require IEx` (leftover debugging aid) was removed.

  @moduledoc false

  @description """
  #Example
  defmodule MyApp.Schema do
  use Absinthe.Schema
  query do
  #Fields go here
  end
  end
  --------------------------------------
  From the graphql schema specification
  A GraphQL schema includes types, indicating where query and mutation
  operations start. This provides the initial entry points into the type system.
  The query type must always be provided, and is an Object base type. The
  mutation type is optional; if it is null, that means the system does not
  support mutations. If it is provided, it must be an object base type.
  Reference: https://facebook.github.io/graphql/#sec-Initial-types
  """

  # Human-readable explanation used when the rule reports a violation.
  def explanation(_value) do
    """
    The root query type must be implemented and be of type Object
    #{@description}
    """
  end

  # Passes when the schema's `:query` root type resolves to an Object;
  # otherwise reports a single error. There is no meaningful source
  # location for a missing root type, so the schema itself is used as the
  # "file" and line 0.
  def check(schema) do
    case Schema.lookup_type(schema, :query) do
      %Absinthe.Type.Object{} ->
        []

      _ ->
        [report(%{file: schema, line: 0}, %{})]
    end
  end
end
defmodule Absinthe.Schema.Rule.TypeNamesAreReserved do
use Absinthe.Schema.Rule
alias Absinthe.Schema
@moduledoc false
@description """
Type system artifacts must not begin with two leading underscores.
> GraphQL type system authors must not define any types, fields, arguments,
> or any other type system artifact with two leading underscores.
Reference: https://github.com/facebook/graphql/blob/master/spec/Section%204%20--%20Introspection.md#naming-conventions
"""
def explanation(%{data: %{artifact: artifact, value: value}}) do
artifact_name = String.capitalize(artifact)
"""
#{artifact_name} #{inspect(value)} starts with two leading underscores.
#{@description}
"""
end
# Check all types (including their fields and field arguments) plus all
# directives (and their arguments).
def check(schema) do
Enum.flat_map(Schema.types(schema), &check_type(schema, &1)) ++
Enum.flat_map(Schema.directives(schema), &check_directive(schema, &1))
end
# Types that carry fields: check the type name and recurse into fields.
defp check_type(schema, %{fields: fields} = type) do
check_named(schema, type, "type", type) ++
Enum.flat_map(fields |> Map.values(), &check_field(schema, type, &1))
end
defp check_type(schema, type) do
check_named(schema, type, "type", type)
end
defp check_field(schema, type, field) do
check_named(schema, type, "field", field) ++
Enum.flat_map(field.args |> Map.values(), &check_arg(schema, type, &1))
end
defp check_directive(schema, directive) do
check_named(schema, directive, "directive", directive) ++
Enum.flat_map(directive.args |> Map.values(), &check_arg(schema, directive, &1))
end
defp check_arg(schema, type, arg) do
check_named(schema, type, "argument", arg)
end
# Built-in types (e.g. introspection types) are exempt from the rule;
# everything else with a `__`-prefixed name is reported.
defp check_named(_schema, type, kind, %{name: "__" <> _} = entity) do
if Absinthe.Type.built_in?(type) do
[]
else
[report(entity.__reference__.location, %{artifact: "#{kind} name", value: entity.name})]
end
end
defp check_named(_, _, _, _) do
[]
end
end
defmodule Absinthe.Schema.Rule.NoCircularFieldImports do
@moduledoc false
# This has to be run prior to the module compilation, and is called
# from Notation.Writer instead of Rule
# Detects `import_fields` cycles by building a directed acyclic graph of
# definition -> imported-definition edges; :digraph rejects any edge that
# would create a cycle.
def check({definitions, errors}) do
acc = []
graph = :digraph.new([:acyclic])
do_check(definitions, graph, errors, acc)
end
# All definitions processed: release the ETS-backed digraph and restore
# the original definition order (acc was built newest-first).
defp do_check([], graph, errors, acc) do
:digraph.delete(graph)
{:lists.reverse(acc), errors}
end
defp do_check([definition | rest], graph, errors, acc) do
{acc, errors} =
definition.attrs
|> Keyword.get(:field_imports)
|> case do
[_ | _] = imports ->
check_imports(definition, imports, graph, errors, acc)
_ ->
{[definition | acc], errors}
end
do_check(rest, graph, errors, acc)
end
# Add an edge per import; a `:bad_edge` result from :digraph means the
# edge would close a cycle, and `path` is the offending vertex path.
defp check_imports(definition, imports, graph, errors, acc) do
:digraph.add_vertex(graph, definition.identifier)
Enum.reduce(imports, [], fn {ref, _}, errors ->
:digraph.add_vertex(graph, ref)
case :digraph.add_edge(graph, definition.identifier, ref) do
{:error, {:bad_edge, path}} ->
# All just error generation logic
deps =
[definition.identifier | path]
|> Enum.map(&"`#{&1}'")
|> Enum.join(" => ")
msg =
String.trim("""
Field Import Cycle Error
Field Import in object `#{definition.identifier}' `import_fields(#{inspect(ref)}) forms a cycle via: (#{
deps
})
""")
error = %{
rule: __MODULE__,
location: %{file: definition.file, line: definition.line},
data: %{artifact: msg, value: ref}
}
[error | errors]
_ ->
errors
end
end)
|> case do
[] -> {[definition | acc], errors}
new_errors -> {acc, new_errors ++ errors}
end
end
end
defmodule Absinthe.Schema.Rule.TypeNamesAreValid do
use Absinthe.Schema.Rule
alias Absinthe.Schema
alias Absinthe.Type
@moduledoc false
@description """
Types must exist if referenced.
"""
def explanation(%{data: %{identifier: identifier, parent: parent}}) do
artifact_name = String.capitalize(parent.name)
"""
#{artifact_name} #{inspect(identifier)} is not defined in your schema.
#{@description}
"""
end
# Resolve every registered type (falling back to lookup by name) and
# recursively verify each type reference it contains.
def check(schema) do
Enum.reduce(schema.__absinthe_types__, [], fn {identifier, name}, acc ->
schema
|> Schema.lookup_type(identifier)
|> case do
nil -> Schema.lookup_type(schema, name)
val -> val
end
|> check_type(acc, schema)
end)
end
defp check_type(type, acc, schema) do
check_type(type, type, acc, schema)
end
# I could do this in fewer clauses by simply matching on the inner properties
# that we care about, but by doing it this way you can easily scan the list
# and compare it to the modules in absinthe/type/*.ex to see it's complete.
# Bare identifier: the actual existence check — report if the schema has
# no type registered under this identifier.
defp check_type(identifier, parent, acc, schema) when is_atom(identifier) do
case schema.__absinthe_type__(identifier) do
nil ->
data = %{parent: parent, identifier: identifier}
[report(parent.__reference__.location, data) | acc]
_ ->
acc
end
end
defp check_type(%Type.Argument{} = arg, _, acc, schema) do
check_type(arg.type, arg, acc, schema)
end
defp check_type(%Type.Directive{} = type, _, acc, schema) do
type.args
|> Map.values()
|> Enum.reduce(acc, &check_type(&1, type, &2, schema))
end
defp check_type(%Type.Enum{} = type, _, acc, schema) do
type.values
|> Map.values()
|> Enum.reduce(acc, &check_type(&1, type, &2, schema))
end
# Enum values carry no type references.
defp check_type(%Type.Enum.Value{}, _, acc, _schema) do
acc
end
# Fields: check each argument, then the field's own result type.
defp check_type(%Type.Field{} = field, _, acc, schema) do
acc =
field.args
|> Map.values()
|> Enum.reduce(acc, &check_type(&1, field, &2, schema))
check_type(field.type, field, acc, schema)
end
defp check_type(%Type.InputObject{} = object, _, acc, schema) do
object.fields
|> Map.values()
|> Enum.reduce(acc, &check_type(&1, object, &2, schema))
end
defp check_type(%Type.Interface{} = interface, _, acc, schema) do
interface.fields
|> Map.values()
|> Enum.reduce(acc, &check_type(&1, interface, &2, schema))
end
# Wrapper types: unwrap and check the inner type.
defp check_type(%Type.List{of_type: inner_type}, parent, acc, schema) do
check_type(inner_type, parent, acc, schema)
end
defp check_type(%Type.NonNull{of_type: inner_type}, parent, acc, schema) do
check_type(inner_type, parent, acc, schema)
end
defp check_type(%Type.Object{} = object, _, acc, schema) do
object.fields
|> Map.values()
|> Enum.reduce(acc, &check_type(&1, object, &2, schema))
end
defp check_type(%Type.Reference{} = ref, _, acc, schema) do
check_type(ref.identifier, ref, acc, schema)
end
# Scalars carry no type references.
defp check_type(%Type.Scalar{}, _, acc, _schema) do
acc
end
defp check_type(%Type.Union{} = union, _, acc, schema) do
union.types
|> Enum.reduce(acc, &check_type(&1, union, &2, schema))
end
end
defmodule Absinthe.Schema.Rule.FieldImportsExist do
@moduledoc false
# This has to be run prior to the module compilation, and is called
# from Notation.Writer instead of Rule
# Verifies every `import_fields` target refers to a definition that
# actually exists among the collected definitions.
def check({definitions, errors}) do
definition_map = build_definition_map(definitions)
errors =
Enum.reduce(definitions, errors, fn definition, errors ->
definition.attrs
|> Keyword.get(:field_imports)
|> case do
[_ | _] = imports ->
check_imports(definition, imports, definition_map, errors)
_ ->
errors
end
end)
{definitions, errors}
end
# Accumulate one error per import ref that is absent from the map.
defp check_imports(definition, imports, definition_map, errors) do
Enum.reduce(imports, errors, fn {ref, _}, errors ->
case Map.fetch(definition_map, ref) do
{:ok, _} ->
errors
_ ->
[error(definition, ref) | errors]
end
end)
end
# identifier => definition lookup, skipping definitions with no identifier.
defp build_definition_map(definitions) do
definitions
|> Enum.filter(&Map.get(&1, :identifier))
|> Map.new(&{&1.identifier, &1})
end
def explanation(%{data: %{artifact: msg}}) do
"""
#{msg}
"""
|> String.trim()
end
# Build the error detail map for a missing import target.
defp error(definition, ref) do
msg =
"""
Field Import Error
Object #{inspect(definition.identifier)} imports fields from #{inspect(ref)} but
#{inspect(ref)} does not exist in the schema!
"""
|> String.trim()
%{
data: %{artifact: msg, value: ref},
location: %{file: definition.file, line: definition.line},
rule: __MODULE__
}
end
end
defmodule Absinthe.Schema.Rule.InterfacesMustResolveTypes do
use Absinthe.Schema.Rule
alias Absinthe.Schema
alias Absinthe.Type
@moduledoc false
@description """
An interface must be able to resolve the implementing types of results.
> The interface type should have some way of determining which object a given
> result corresponds to.
Reference: https://github.com/facebook/graphql/blob/master/spec/Section%203%20--%20Type%20System.md#interfaces
"""
def explanation(%{data: interface}) do
"""
Interface type "#{interface}" either:
* Does not have a `resolve_type` function.
* Is missing a `is_type_of` function on all implementing types.
#{@description}
"""
end
# Only interface types are examined (see clauses below).
def check(schema) do
Schema.types(schema)
|> Enum.flat_map(&check_type(schema, &1))
end
# Report any interface whose concrete type cannot be determined at
# runtime (per `Type.Interface.type_resolvable?/2`).
defp check_type(schema, %Type.Interface{} = type) do
if Type.Interface.type_resolvable?(schema, type) do
[]
else
[report(type.__reference__.location, type.name)]
end
end
defp check_type(_, _) do
[]
end
end
defmodule Absinthe.Schema.Rule.DefaultEnumValuePresent do
  use Absinthe.Schema.Rule
  alias Absinthe.{Schema, Type}

  # NOTE: an unused `require IEx` (leftover debugging aid) was removed.

  @moduledoc false

  # Explanation shown when an argument declares a default value that is not
  # a member of its enum type.
  def explanation(%{data: %{default_value: default_value, type: type, value_list: value_list}}) do
    """
    The default_value for an enum must be present in the enum values.
    Could not use default value of "#{default_value}" for #{inspect(type)}.
    Valid values are:
    #{value_list}
    """
  end

  # Walk every type in the schema; only object types are inspected.
  def check(schema) do
    Schema.types(schema)
    |> Enum.flat_map(&check_type(schema, &1))
  end

  defp check_type(schema, %Type.Object{fields: fields}) when not is_nil(fields) do
    Enum.flat_map(fields, &check_field(schema, &1))
  end

  defp check_type(_schema, _type), do: []

  defp check_field(schema, {_name, %{args: args}}) when not is_nil(args) do
    Enum.flat_map(args, &check_args(schema, &1))
  end

  defp check_field(_schema, _type), do: []

  # Only arguments with a declared default are validated; the argument's
  # type reference is resolved first.
  defp check_args(schema, {_name, %{default_value: default_value, type: type}})
       when not is_nil(default_value) do
    type = Schema.lookup_type(schema, type)
    check_default_value_present(default_value, type)
  end

  defp check_args(_schema, _args), do: []

  # For enum types the default must be one of the declared values.
  defp check_default_value_present(default_value, %Type.Enum{} = type) do
    values = Enum.map(type.values, &elem(&1, 1).value)

    if default_value not in values do
      # Only build the rendered value listing when we actually report.
      value_list = Enum.map(values, &"\n * #{&1}")

      detail = %{
        value_list: value_list,
        type: type.__reference__.identifier,
        default_value: default_value
      }

      [report(type.__reference__.location, detail)]
    else
      []
    end
  end

  # Non-enum types accept any default value as far as this rule is concerned.
  defp check_default_value_present(_default_value, _type), do: []
end
# NOTE(review): the module name is missing a "t" ("InputOuput…"); it is kept
# as-is because the name is part of the public API.
defmodule Absinthe.Schema.Rule.InputOuputTypesCorrectlyPlaced do
use Absinthe.Schema.Rule
alias Absinthe.Schema
alias Absinthe.Type
@moduledoc false
@description """
Only input types may be used as inputs. Input types may not be used as output types
Input types consist of Scalars, Enums, and Input Objects.
"""
# Explanation for an argument whose type is not an input type.
def explanation(%{data: %{argument: argument, type: type, struct: struct}}) do
"""
#{inspect(type)} is not a valid input type for argument #{inspect(argument)} because
it is an #{Macro.to_string(struct)} type. Arguments may only be input types.
#{@description}
"""
end
# Explanation for a field whose type is invalid for its parent kind.
def explanation(%{data: %{field: field, type: type, struct: struct, parent: parent}}) do
"""
#{inspect(type)} is not a valid type for field #{inspect(field)} because
it is an #{Macro.to_string(struct)} type, and the parent of this field is an #{
Macro.to_string(parent)
} type.
#{@description}
"""
end
def check(schema) do
Schema.types(schema)
|> Enum.flat_map(&check_type(schema, &1))
end
# Objects: fields must be output types; field arguments must be input types.
defp check_type(schema, %Type.Object{} = type) do
field_errors =
for {_, field} <- type.fields,
field_type = get_type(field, schema),
!output_type?(field_type) do
detail = %{
field: field.identifier,
type: field_type.__reference__.identifier,
struct: field_type.__struct__,
parent: Type.Object
}
report(type.__reference__.location, detail)
end
argument_errors =
for {_, field} <- type.fields,
{_, arg} <- field.args,
type = get_type(arg, schema),
!input_type?(type) do
detail = %{
argument: arg.__reference__.identifier,
type: type.__reference__.identifier,
struct: type.__struct__
}
report(type.__reference__.location, detail)
end
field_errors ++ argument_errors
end
# Input objects: every field must itself be an input type.
defp check_type(schema, %Type.InputObject{} = type) do
for {_, field} <- type.fields,
field_type = get_type(field, schema),
!input_type?(field_type) do
detail = %{
field: field.identifier,
type: field_type.__reference__.identifier,
struct: field_type.__struct__,
parent: Type.InputObject
}
report(type.__reference__.location, detail)
end
end
defp check_type(_, _) do
[]
end
# Expand a type reference and strip List/NonNull wrappers.
defp get_type(%{type: type}, schema) do
Type.expand(type, schema)
|> Type.unwrap()
end
defp get_type(type, schema) do
Type.expand(type, schema)
|> Type.unwrap()
end
# Input types are scalars, enums, and input objects.
defp input_type?(%Type.Scalar{}), do: true
defp input_type?(%Type.Enum{}), do: true
defp input_type?(%Type.InputObject{}), do: true
defp input_type?(_), do: false
# Everything except input objects may appear in an output position.
defp output_type?(%Type.InputObject{}), do: false
defp output_type?(_), do: true
end
defmodule Absinthe.Schema.Verification.Unions do
@moduledoc false
alias Absinthe.Schema
alias Absinthe.Type
# Verify that every union member type can be resolved at runtime,
# accumulating errors onto the schema.
@spec check(Schema.t()) :: Schema.t()
def check(schema) do
schema
|> unions
|> Enum.reduce(schema, fn %{types: concrete_types} = union, acc ->
check_resolvers(union, concrete_types, acc)
end)
end
# Find the union types
@spec unions(Schema.t()) :: [Type.Union.t()]
defp unions(schema) do
schema.types.by_identifier
|> Map.values()
|> Enum.filter(fn type -> match?(%Type.Union{}, type) end)
end
# All member types checked.
defp check_resolvers(_union, [], schema) do
schema
end
# Union without a `resolve_type`: each concrete member type must exist and
# provide `is_type_of`, otherwise the concrete type can never be resolved
# during execution.
defp check_resolvers(
%{resolve_type: nil, __reference__: %{identifier: ident}} = union,
[concrete_type_ident | rest],
schema
) do
case schema.types[concrete_type_ident] do
nil ->
err = "Could not find concrete type :#{concrete_type_ident} for union type :#{ident}"
check_resolvers(union, rest, %{schema | errors: [err | schema.errors]})
%{is_type_of: nil} ->
err =
"Union type :#{ident} does not provide a `resolve_type` function and concrete type :#{
concrete_type_ident
} does not provide an `is_type_of` function. There is no way to resolve this concrete type during execution."
check_resolvers(union, rest, %{schema | errors: [err | schema.errors]})
%{is_type_of: _} ->
check_resolvers(union, rest, schema)
end
end
# A union that defines `resolve_type` needs no per-member checks.
defp check_resolvers(%{resolve_type: _}, _concrete_types, schema) do
schema
end
end
defmodule Absinthe.Schema.Notation.Error do
  @moduledoc """
  Exception raised when a schema is invalid
  """

  defexception message: "Invalid notation schema"

  # Build the exception directly from a plain message string.
  def exception(message), do: %__MODULE__{message: message}
end
defmodule Absinthe.Schema.Notation.SDL do
  @moduledoc false

  @doc """
  Parse definitions from SDL source
  """
  # Spec fix: `module()` (a plain atom) is the correct built-in type here;
  # `Module.t()` is not a defined type and trips Dialyzer.
  @spec parse(sdl :: String.t(), module()) ::
          {:ok, [Absinthe.Blueprint.Schema.type_t()]} | {:error, String.t()}
  def parse(sdl, module) do
    with {:ok, doc} <- Absinthe.Phase.Parse.run(sdl) do
      # Convert each parsed definition to its blueprint form and re-home it
      # onto the importing module.
      definitions =
        doc.input.definitions
        |> Enum.map(&Absinthe.Blueprint.Draft.convert(&1, doc))
        |> Enum.map(fn type -> %{type | module: module} end)

      {:ok, definitions}
    else
      {:error, %Absinthe.Blueprint{execution: %{validation_errors: [_ | _] = errors}}} ->
        # Collapse all validation errors into a single readable message.
        error =
          errors
          |> Enum.map(&"#{&1.message} (#{inspect(&1.locations)})")
          |> Enum.join("\n")

        {:error, error}

      other ->
        other
    end
  end
end
defmodule Absinthe.Schema.Notation.Scope do
  @moduledoc false

  # Module attribute used as a per-module stack of open notation scopes.
  @stack :absinthe_notation_scopes

  defstruct name: nil, recordings: [], attrs: []

  use Absinthe.Type.Fetch

  # Push a new scope onto the module's scope stack.
  def open(name, mod, attrs \\ []) do
    Module.put_attribute(mod, @stack, [%__MODULE__{name: name, attrs: attrs} | on(mod)])
  end

  # Pop and return the current (innermost) scope.
  def close(mod) do
    {current, rest} = split(mod)
    Module.put_attribute(mod, @stack, rest)
    current
  end

  # Split the stack into `{current_scope_or_nil, remaining}`.
  def split(mod) do
    case on(mod) do
      [] ->
        {nil, []}

      [current | rest] ->
        {current, rest}
    end
  end

  # The scope currently on top of the stack (or nil when none is open).
  def current(mod) do
    {c, _} = split(mod)
    c
  end

  # Mark that `{kind, identifier}` has been recorded in the current scope.
  def recorded!(mod, kind, identifier) do
    update_current(mod, fn
      %{recordings: recs} = scope ->
        %{scope | recordings: [{kind, identifier} | recs]}

      nil ->
        # Outside any scopes, ignore
        nil
    end)
  end

  @doc """
  Check if a certain operation has been recorded in the current scope.
  ## Examples
  See if an input object with the identifier `:input` has been defined from
  this scope:
  ```
  recorded?(mod, :input_object, :input)
  ```
  See if the `:description` attribute has been
  ```
  recorded?(mod, :attr, :description)
  ```
  """
  @spec recorded?(atom, atom, atom) :: boolean
  def recorded?(mod, kind, identifier) do
    scope = current(mod)

    case kind do
      :attr ->
        # Supports attributes passed directly to the macro that
        # created the scope, usually (?) short-circuits the need to
        # check the scope recordings.
        #
        # `!!` normalizes the stored attribute value to a boolean so this
        # function honors its `:: boolean` spec (truthiness is unchanged).
        !!scope.attrs[identifier] or recording_marked?(scope, kind, identifier)

      _ ->
        recording_marked?(scope, kind, identifier)
    end
  end

  # Check the list of recordings for `recorded?/3`.
  #
  # Uses `Enum.any?/2` rather than `Enum.find/2` so that — per its `?`
  # naming convention — this returns a boolean instead of leaking the
  # matched `{kind, identifier}` tuple (or nil) to callers.
  defp recording_marked?(scope, kind, identifier) do
    Enum.any?(scope.recordings, &match?({^kind, ^identifier}, &1))
  end

  # Set (or, with `accumulate: true`, prepend to) an attribute on the
  # current scope.
  def put_attribute(mod, key, value, opts \\ [accumulate: false]) do
    if opts[:accumulate] do
      update_current(mod, fn scope ->
        new_attrs = update_in(scope.attrs, [key], &[value | &1 || []])
        %{scope | attrs: new_attrs}
      end)
    else
      update_current(mod, fn scope ->
        %{scope | attrs: Keyword.put(scope.attrs, key, value)}
      end)
    end
  end

  # Apply `fun` to the current scope and push the result back on the stack.
  # NOTE(review): with an empty stack `fun.(nil)` may return nil, pushing a
  # literal nil entry — pre-existing behavior, preserved here.
  defp update_current(mod, fun) do
    {current, rest} = split(mod)
    updated = fun.(current)
    Module.put_attribute(mod, @stack, [updated | rest])
  end

  # Fetch the scope stack, initializing it to `[]` on first use.
  def on(mod) do
    case Module.get_attribute(mod, @stack) do
      nil ->
        Module.put_attribute(mod, @stack, [])
        []

      value ->
        value
    end
  end
end
defmodule Absinthe.Schema.Notation.Definition do
  @moduledoc false

  # Intermediate record for a single schema definition collected while
  # notation macros expand; consumed by `Notation.Writer`.
  defstruct [
    :category,
    :source,
    :identifier,
    :builder,
    :file,
    :line,
    attrs: [],
    opts: []
  ]
end
defmodule Absinthe.Schema.Notation.Writer do
@moduledoc false
# Accumulator used while generating the compiled schema module's functions:
# type/directive lookup maps, generated function ASTs, exports, interface
# implementors, and any errors gathered along the way.
defstruct [
:env,
type_map: %{},
directive_map: %{},
errors: [],
type_functions: [],
directive_functions: [],
exports: [],
implementors: %{}
]
# Emit the schema module's reflection API: `__absinthe_types__/0`,
# `__absinthe_type__/1` (one clause per type plus a nil fallback), the
# directive equivalents, and the errors/implementors/exports accessors.
defmacro __before_compile__(env) do
info = build_info(env)
errors = Macro.escape(info.errors)
exports = Macro.escape(info.exports)
type_map = Macro.escape(info.type_map)
implementors = Macro.escape(info.implementors)
directive_map = Macro.escape(info.directive_map)
[
quote do
def __absinthe_types__, do: unquote(type_map)
end,
info.type_functions,
# Fallback clause must come after the generated per-type clauses.
quote do
def __absinthe_type__(_), do: nil
end,
quote do
def __absinthe_directives__, do: unquote(directive_map)
end,
info.directive_functions,
quote do
def __absinthe_directive__(_), do: nil
end,
quote do
def __absinthe_errors__, do: unquote(errors)
def __absinthe_interface_implementors__, do: unquote(implementors)
def __absinthe_exports__, do: unquote(exports)
end
]
end
defp init_implementors(nil) do
%{}
end
defp init_implementors(modules) do
modules
|> Enum.map(& &1.__absinthe_interface_implementors__)
|> Enum.reduce(%{}, fn implementors, acc ->
Map.merge(implementors, acc, fn _k, v1, v2 ->
v1 ++ v2
end)
end)
end
def build_info(env) do
implementors =
env.module
|> Module.get_attribute(:absinthe_imports)
|> init_implementors
descriptions =
env.module
|> Module.get_attribute(:absinthe_descriptions)
|> Map.new()
definitions =
env.module
|> Module.get_attribute(:absinthe_definitions)
|> Enum.map(&update_description(&1, descriptions))
{definitions, errors} =
{definitions, []}
|> Absinthe.Schema.Rule.FieldImportsExist.check()
|> Absinthe.Schema.Rule.NoCircularFieldImports.check()
info = %__MODULE__{
env: env,
errors: errors,
implementors: implementors
}
Enum.reduce(definitions, info, &do_build_info/2)
end
defp type_functions(definition) do
ast = build(:type, definition)
identifier = definition.identifier
name = definition.attrs[:name]
result = [
quote(do: def(__absinthe_type__(unquote(name)), do: __absinthe_type__(unquote(identifier))))
]
if definition.builder == Absinthe.Type.Object do
[
quote do
def __absinthe_type__(unquote(identifier)) do
unquote(ast)
end
end,
result
]
else
[
quote do
def __absinthe_type__(unquote(identifier)), do: unquote(ast)
end,
result
]
end
end
defp directive_functions(definition) do
ast = build(:directive, definition)
identifier = definition.identifier
name = definition.attrs[:name]
quote do
def __absinthe_directive__(unquote(identifier)), do: unquote(ast)
def __absinthe_directive__(unquote(name)), do: __absinthe_directive__(unquote(identifier))
end
end
# Type import reference
defp build(:type, %{source: source, builder: nil} = definition) do
quote bind_quoted: [source: source, identifier: definition.identifier] do
source.__absinthe_type__(identifier)
end
end
# Directive import reference
defp build(:directive, %{source: source, builder: nil} = definition) do
quote bind_quoted: [source: source, identifier: definition.identifier] do
source.__absinthe_directive__(identifier)
end
end
# Type/Directive definition
defp build(_, %{source: nil, builder: builder} = definition) do
builder.build(definition)
end
defp directive_name_error(definition) do
%{
rule: Absinthe.Schema.Rule.TypeNamesAreUnique,
location: %{file: definition.file, line: definition.line},
data: %{artifact: "Absinthe directive identifier", value: definition.identifier}
}
end
defp type_name_error(artifact, value, definition) do
%{
rule: Absinthe.Schema.Rule.TypeNamesAreUnique,
location: %{file: definition.file, line: definition.line},
data: %{artifact: artifact, value: value}
}
end
defp directive_errors(definition, state) do
case Map.has_key?(state.directive_map, definition.identifier) do
true ->
[directive_name_error(definition)]
false ->
[]
end
end
defp type_errors(definition, state) do
[
if Map.has_key?(state.type_map, definition.identifier) do
type_name_error("Absinthe type identifier", definition.identifier, definition)
end,
if Enum.member?(Map.values(state.type_map), definition.attrs[:name]) do
type_name_error("Type name", definition.attrs[:name], definition)
end
]
|> Enum.reject(&is_nil/1)
end
defp update_description(definition, descriptions) do
case Map.get(descriptions, definition.identifier) do
nil -> definition
desc -> Map.update!(definition, :attrs, &Keyword.put(&1, :description, desc))
end
end
defp do_build_info(%{category: :directive} = definition, info) do
errors = directive_errors(definition, info)
info
|> update_directive_map(definition)
|> update_directive_functions(definition, errors)
|> update_exports(definition)
|> update_errors(errors)
end
defp do_build_info(%{category: :type} = definition, info) do
errors = type_errors(definition, info)
info
|> update_type_map(definition)
|> update_type_functions(definition, errors)
|> update_implementors(definition)
|> update_exports(definition)
|> update_errors(errors)
end
defp update_directive_map(info, definition) do
Map.update!(
info,
:directive_map,
&Map.put(&1, definition.identifier, definition.attrs[:name])
)
end
defp update_directive_functions(info, definition, []) do
Map.update!(info, :directive_functions, &[directive_functions(definition) | &1])
end
defp update_type_map(info, definition) do
Map.update!(info, :type_map, &Map.put(&1, definition.identifier, definition.attrs[:name]))
end
defp update_type_functions(info, definition, []) do
Map.update!(info, :type_functions, &[type_functions(definition) | &1])
end
defp update_type_functions(info, _definition, _errors), do: info
defp update_implementors(info, definition) do
implementors =
definition.attrs[:interfaces]
|> List.wrap()
|> Enum.reduce(info.implementors, fn iface, implementors ->
Map.update(implementors, iface, [definition.identifier], &[definition.identifier | &1])
end)
%{info | implementors: implementors}
end
defp update_exports(info, definition) do
exports =
if Keyword.get(definition.opts, :export, definition.source != Absinthe.Type.BuiltIns) do
[definition.identifier | info.exports]
else
info.exports
end
%{info | exports: exports}
end
defp update_errors(info, errors) do
%{info | errors: errors ++ info.errors}
end
end
defmodule Absinthe.Schema.Rule do
  @moduledoc false

  alias __MODULE__

  # Injects the `Absinthe.Schema.Rule` behaviour plus a `report/2` helper
  # that wraps a violation in the standard error-detail map.
  defmacro __using__(_opts) do
    quote do
      @behaviour unquote(__MODULE__)

      def report(location, data) do
        %{
          rule: __MODULE__,
          location: location,
          data: data
        }
      end
    end
  end

  @callback check(Absinthe.Schema.t()) :: [Absinthe.Schema.Error.detail_t()]
  @callback explanation(Absinthe.Schema.Error.detail_t()) :: binary

  @type t :: module

  # Schema-wide rules, run in order by `check/1`.
  # NOTE(review): `InputOuputTypesCorrectlyPlaced` is spelled without the
  # second "t" — confirm it matches the actual module name before renaming.
  @rules [
    Rule.QueryTypeMustBeObject,
    Rule.TypeNamesAreReserved,
    Rule.TypeNamesAreValid,
    Rule.ObjectInterfacesMustBeValid,
    Rule.ObjectMustImplementInterfaces,
    Rule.InterfacesMustResolveTypes,
    Rule.InputOuputTypesCorrectlyPlaced,
    Rule.DefaultEnumValuePresent
  ]

  # Run every rule against the schema and concatenate the error details.
  @spec check(Absinthe.Schema.t()) :: [Absinthe.Schema.Error.detail_t()]
  def check(schema) do
    for rule <- @rules, detail <- rule.check(schema), do: detail
  end
end
defmodule Absinthe.Resolution do
  @moduledoc """
  Information about the current resolution. It is created by adding field specific
  information to the more general `%Absinthe.Blueprint.Execution{}` struct.
  In many ways like the `%Conn{}` from `Plug`, the `%Absinthe.Resolution{}` is the
  piece of information that is passed along from middleware to middleware as part of
  resolution.
  ## Contents
  - `:adapter` - The adapter used for any name conversions.
  - `:definition` - The Blueprint definition for this field.
  - `:context` - The context passed to `Absinthe.run`.
  - `:root_value` - The root value passed to `Absinthe.run`, if any.
  - `:parent_type` - The parent type for the field.
  - `:private` - Operates similarly to the `:private` key on a `%Plug.Conn{}`
  and is a place for libraries (and similar) to store their information.
  - `:schema` - The current schema.
  - `:source` - The resolved parent object; source of this field.
  When a `%Resolution{}` is accessed via middleware, you may want to update the
  context (e.g. to cache a dataloader instance or the result of an ecto query).
  Updating the context can be done simply by using the map updating syntax (or
  `Map.put/3`):
  ```elixir
  %{resolution | context: new_context}
  # OR
  Map.put(resolution, :context, new_context)
  ```
  To access the schema type for this field, see the `definition.schema_node`.
  """
  @typedoc """
  The arguments that are passed from the schema. (e.g. id of the record to be
  fetched)
  """
  @type arguments :: %{optional(atom) => any}
  @type source :: any
  # NOTE(review): `t` below omits several struct fields (`:middleware`,
  # `:private`, `:path`, `:fields_cache`) — presumably intentional as they
  # are internal; verify before relying on the spec.
  @type t :: %__MODULE__{
          value: term,
          errors: [term],
          adapter: Absinthe.Adapter.t(),
          context: map,
          root_value: any,
          schema: Absinthe.Schema.t(),
          definition: Absinthe.Blueprint.node_t(),
          parent_type: Absinthe.Type.t(),
          source: source,
          state: field_state,
          acc: %{any => any},
          extensions: %{any => any},
          arguments: arguments,
          fragments: [Absinthe.Blueprint.Document.Fragment.Named.t()]
        }
  defstruct [
    :value,
    :adapter,
    :context,
    :parent_type,
    :root_value,
    :definition,
    :schema,
    :source,
    errors: [],
    middleware: [],
    acc: %{},
    arguments: %{},
    extensions: %{},
    private: %{},
    path: [],
    state: :unresolved,
    fragments: [],
    fields_cache: %{}
  ]
  # Wraps a resolver function in the middleware-spec tuple form used by the
  # middleware pipeline.
  def resolver_spec(fun) do
    {{__MODULE__, :call}, fun}
  end
  @type field_state :: :unresolved | :resolved | :suspended
  @doc """
  Get the child fields under the current field.
  See `project/2` for details.
  """
  # Raises for abstract (interface/union) parent types, where the concrete
  # type must be supplied explicitly via `project/2`.
  def project(info) do
    case info.definition.schema_node.type do
      %Absinthe.Type.Interface{} ->
        raise need_concrete_type_error()

      %Absinthe.Type.Union{} ->
        raise need_concrete_type_error()

      schema_node ->
        project(info, schema_node)
    end
  end
  @doc """
  Get the current path.
  Each `Absinthe.Resolution` struct holds the current result path as a list of
  blueprint nodes and indices. Usually however you don't need the full AST list
  and instead just want the path that will eventually end up in the result.
  For that, use this function.
  ## Examples
  Given some query:
  ```
  {users { email }}
  ```
  If you called this function inside a resolver on the users email field it
  returns a value like:
  ```elixir
  resolve fn _, _, resolution ->
    Absinthe.Resolution.path(resolution) #=> ["users", 5, "email"]
  end
  ```
  In this case `5` is the 0 based index in the list of users the field is currently
  at.
  """
  # `path` is stored innermost-first; reverse it, drop the operation root,
  # then map each node to its result key (alias/name) or list index.
  def path(%{path: path}) do
    path
    |> Enum.reverse()
    |> Enum.drop(1)
    |> Enum.map(&field_name/1)
  end
  # Result key for a path entry: alias wins over name; bare integers are
  # list indices and pass through unchanged.
  defp field_name(%{alias: nil, name: name}), do: name
  defp field_name(%{alias: name}), do: name
  defp field_name(%{name: name}), do: name
  defp field_name(index), do: index
  @doc """
  Get the child fields under the current field.
  ## Example
  Given a document like:
  ```
  { user { id name }}
  ```
  ```
  field :user, :user do
    resolve fn _, info ->
      child_fields = Absinthe.Resolution.project(info) |> Enum.map(&(&1.name))
      # ...
    end
  end
  ```
  `child_fields` will be `["id", "name"]`.
  It correctly handles fragments, so for example if you had the document:
  ```
  {
    user {
      ... on User {
        id
      }
      ... on Named {
        name
      }
    }
  }
  ```
  you would still get a nice and simple `child_fields` that was `["id", "name"]`.
  """
  # Delegates fragment flattening/caching to `Absinthe.Resolution.Projector`.
  def project(
        %{
          definition: %{selections: selections},
          path: path,
          fields_cache: cache
        } = info,
        type
      ) do
    type = Absinthe.Schema.lookup_type(info.schema, type)

    {fields, _} = Absinthe.Resolution.Projector.project(selections, type, path, cache, info)

    fields
  end
  defp need_concrete_type_error() do
    """
    You tried to project from a field that is an abstract type without concrete type information!
    Use `project/2` instead of `project/1`, and supply the type yourself please!
    """
  end
  # Middleware entry point: invokes the user's resolver (2-arity, 3-arity,
  # or `{Module, :function}`) and folds its return tuple into the struct
  # via `put_result/2`. Any other resolver shape raises.
  def call(%{state: :unresolved} = res, resolution_function) do
    result =
      case resolution_function do
        fun when is_function(fun, 2) ->
          fun.(res.arguments, res)

        fun when is_function(fun, 3) ->
          fun.(res.source, res.arguments, res)

        {mod, fun} ->
          apply(mod, fun, [res.source, res.arguments, res])

        _ ->
          raise Absinthe.ExecutionError, """
          Field resolve property must be a 2 arity anonymous function, 3 arity
          anonymous function, or a `{Module, :function}` tuple.
          Instead got: #{inspect(resolution_function)}
          Info: #{inspect(res)}
          """
      end

    put_result(res, result)
  end
  # Already resolved/suspended fields pass through untouched.
  def call(res, _), do: res
  def path_string(%__MODULE__{path: path}) do
    Enum.map(path, fn
      %{name: name, alias: alias} ->
        alias || name

      %{schema_node: schema_node} ->
        schema_node.name
    end)
  end
  @doc """
  Handy function for applying user function result tuples to a resolution struct
  User facing functions generally return one of several tuples like `{:ok, val}`
  or `{:error, reason}`. This function handles applying those various tuples
  to the resolution struct.
  The resolution state is updated depending on the tuple returned. `:ok` and
  `:error` tuples set the state to `:resolved`, whereas middleware tuples set it
  to `:unresolved`.
  This is useful for middleware that wants to handle user facing functions, but
  does not want to duplicate this logic.
  """
  def put_result(res, {:ok, value}) do
    %{res | state: :resolved, value: value}
  end
  # A keyword list is treated as a SINGLE error (message + metadata), not a
  # list of errors — hence the extra wrapping list.
  def put_result(res, {:error, [{_, _} | _] = error_keyword}) do
    %{res | state: :resolved, errors: [error_keyword]}
  end
  def put_result(res, {:error, errors}) do
    %{res | state: :resolved, errors: List.wrap(errors)}
  end
  # Legacy `:plugin` tuples are normalized to `:middleware` tuples.
  def put_result(res, {:plugin, module, opts}) do
    put_result(res, {:middleware, module, opts})
  end
  def put_result(res, {:middleware, module, opts}) do
    %{res | state: :unresolved, middleware: [{module, opts} | res.middleware]}
  end
  # Anything else is an invalid resolver return value.
  def put_result(res, result) do
    raise result_error(result, res.definition, res.source)
  end
  @doc false
  # Specialized message for a malformed `:error` tuple vs. any other value.
  def result_error({:error, _} = value, field, source) do
    result_error(
      value,
      field,
      source,
      "You're returning an :error tuple, but did you forget to include a `:message`\nkey in every custom error (map or keyword list)?"
    )
  end
  def result_error(value, field, source) do
    result_error(
      value,
      field,
      source,
      "Did you forget to return a valid `{:ok, any}` | `{:error, error_value}` tuple?"
    )
  end
  @doc """
  TODO: Deprecate
  """
  # Legacy call shape (resolver first); same dispatch as `call/2` but
  # returns the raw result instead of folding it into the struct.
  def call(resolution_function, parent, args, field_info) do
    case resolution_function do
      fun when is_function(fun, 2) ->
        fun.(args, field_info)

      fun when is_function(fun, 3) ->
        fun.(parent, args, field_info)

      {mod, fun} ->
        apply(mod, fun, [parent, args, field_info])

      _ ->
        raise Absinthe.ExecutionError, """
        Field resolve property must be a 2 arity anonymous function, 3 arity
        anonymous function, or a `{Module, :function}` tuple.
        Instead got: #{inspect(resolution_function)}
        Info: #{inspect(field_info)}
        """
    end
  end
  def call(function, args, info) do
    call(function, info.source, args, info)
  end
  # NOTE: runtime text interpolated into exception messages — do not edit
  # casually.
  @error_detail """
  ## For a data result
  `{:ok, any}` result will do.
  ### Examples:
  A simple integer result:
  {:ok, 1}
  Something more complex:
  {:ok, %Model.Thing{some: %{complex: :data}}}
  ## For an error result
  One or more errors for a field can be returned in a single `{:error, error_value}` tuple.
  `error_value` can be:
  - A simple error message string.
  - A map containing `:message` key, plus any additional serializable metadata.
  - A keyword list containing a `:message` key, plus any additional serializable metadata.
  - A list containing multiple of any/all of these.
  - Any other value compatible with `to_string/1`.
  ### Examples
  A simple error message:
  {:error, "Something bad happened"}
  Multiple error messages:
  {:error, ["Something bad", "Even worse"]
  Single custom errors (note the required `:message` keys):
  {:error, message: "Unknown user", code: 21}
  {:error, %{message: "A database error occurred", details: format_db_error(some_value)}}
  Three errors of mixed types:
  {:error, ["Simple message", [message: "A keyword list error", code: 1], %{message: "A map error"}]}
  Generic handler for interoperability with errors from other libraries:
  {:error, :foo}
  {:error, 1.0}
  {:error, 2}
  ## To activate a plugin
  `{:plugin, NameOfPluginModule, term}` to activate a plugin.
  See `Absinthe.Resolution.Plugin` for more information.
  """
  # Builds the ExecutionError raised for an invalid resolver return value,
  # pointing at the field's definition site.
  def result_error(value, field, source, guess) do
    Absinthe.ExecutionError.exception("""
    Invalid value returned from resolver.
    Resolving field:
    #{field.name}
    Defined at:
    #{field.schema_node.__reference__.location.file}:#{
      field.schema_node.__reference__.location.line
    }
    Resolving on:
    #{inspect(source)}
    Got value:
    #{inspect(value)}
    ...
    #{guess}
    ...
    The result must be one of the following...
    #{@error_detail}
    """)
  end
end
defimpl Inspect, for: Absinthe.Resolution do
  import Inspect.Algebra

  # Renders the struct as "#Absinthe.Resolution<...>", substituting a short
  # placeholder for the (potentially huge) fields cache.
  def inspect(res, opts) do
    # TODO: better inspect representation
    fields =
      res
      |> Map.from_struct()
      |> Map.put(:fields_cache, "#fieldscache<...>")
      |> Map.to_list()

    concat(["#Absinthe.Resolution<", Inspect.List.inspect(fields, opts), ">"])
  end
end
defmodule Absinthe.Type.Deprecation do
  @moduledoc false

  # Carries the reason attached to a deprecated field or enum value.
  @type t :: %{reason: binary}

  defstruct [:reason]
end
defmodule Absinthe.Type.Interface do
  @moduledoc """
  A defined interface type that represent a list of named fields and their
  arguments.
  Fields on an interface have the same rules as fields on an
  `Absinthe.Type.Object`.
  If an `Absinthe.Type.Object` lists an interface in its `:interfaces` entry,
  it guarantees that it defines the same fields and arguments that the
  interface does.
  Because sometimes it's for the interface to determine the implementing type of
  a resolved object, you must either:
  * Provide a `:resolve_type` function on the interface
  * Provide a `:is_type_of` function on each implementing type
  ```
  interface :named_entity do
    field :name, :string
    resolve_type fn
      %{age: _}, _ -> :person
      %{employee_count: _}, _ -> :business
      _, _ -> nil
    end
  end
  object :person do
    field :name, :string
    field :age, :string
    interface :named_entity
  end
  object :business do
    field :name, :string
    field :employee_count, :integer
    interface :named_entity
  end
  ```
  """
  use Absinthe.Introspection.Kind

  alias Absinthe.Type
  alias Absinthe.Schema

  @typedoc """
  * `:name` - The name of the interface type. Should be a TitleCased `binary`. Set automatically.
  * `:description` - A nice description for introspection.
  * `:fields` - A map of `Absinthe.Type.Field` structs. See `Absinthe.Schema.Notation.field/1` and
  * `:args` - A map of `Absinthe.Type.Argument` structs. See `Absinthe.Schema.Notation.arg/2`.
  * `:resolve_type` - A function used to determine the implementing type of a resolved object. See also `Absinthe.Type.Object`'s `:is_type_of`.
  The `:resolve_type` function will be passed two arguments; the object whose type needs to be identified, and the `Absinthe.Execution` struct providing the full execution context.
  The `__private__` and `:__reference__` keys are for internal use.
  """
  @type t :: %__MODULE__{
          name: binary,
          description: binary,
          fields: map,
          identifier: atom,
          __private__: Keyword.t(),
          definition: Module.t(),
          __reference__: Type.Reference.t()
        }

  # Fix: `resolve_type: nil` was listed twice in this defstruct (a duplicate
  # keyword key, which triggers a compiler warning and is redundant); the
  # duplicate entry has been removed.
  defstruct name: nil,
            description: nil,
            fields: nil,
            identifier: nil,
            resolve_type: nil,
            __private__: [],
            definition: nil,
            __reference__: nil

  @doc false
  defdelegate functions, to: Absinthe.Blueprint.Schema.InterfaceTypeDefinition

  # Determine the concrete type for `obj`: use the interface's
  # `:resolve_type` function when present, otherwise fall back to each
  # implementor's `:is_type_of` check. With `lookup: false` the bare type
  # identifier is returned instead of the looked-up type struct.
  @spec resolve_type(Type.Interface.t(), any, Absinthe.Resolution.t()) :: Type.t() | nil
  def resolve_type(type, obj, env, opts \\ [lookup: true])

  def resolve_type(interface, obj, %{schema: schema} = env, opts) do
    implementors = Schema.implementors(schema, interface.identifier)

    if resolver = Type.function(interface, :resolve_type) do
      case resolver.(obj, env) do
        nil ->
          nil

        ident when is_atom(ident) ->
          if opts[:lookup] do
            Absinthe.Schema.lookup_type(schema, ident)
          else
            ident
          end
      end
    else
      type_name =
        Enum.find(implementors, fn type ->
          Absinthe.Type.function(type, :is_type_of).(obj)
        end)

      if opts[:lookup] do
        Absinthe.Schema.lookup_type(schema, type_name)
      else
        type_name
      end
    end
  end

  @doc """
  Whether the interface (or implementors) are correctly configured to resolve
  objects.
  """
  @spec type_resolvable?(Schema.t(), t) :: boolean
  def type_resolvable?(schema, %{resolve_type: nil} = iface) do
    # Without a :resolve_type on the interface, every implementor must
    # provide :is_type_of.
    Schema.implementors(schema, iface)
    |> Enum.all?(& &1.is_type_of)
  end

  def type_resolvable?(_, %{resolve_type: _}) do
    true
  end

  # Whether `type` (an object with an `:interfaces` list) declares this
  # interface.
  @doc false
  @spec member?(t, Type.t()) :: boolean
  def member?(%{identifier: ident}, %{interfaces: ifaces}) do
    ident in ifaces
  end

  def member?(_, _) do
    false
  end

  # Structural check that `type`'s fields are covariant with the
  # interface's fields.
  # Fix (spec only): referenced nonexistent `Type.Schema.t()`; the third
  # argument is the schema module, i.e. `Schema.t()`.
  @spec implements?(Type.Interface.t(), Type.Object.t(), Schema.t()) :: boolean
  def implements?(interface, type, schema) do
    covariant?(interface, type, schema)
  end

  # Same wrapper (List/NonNull): recurse on the inner types.
  defp covariant?(%wrapper{of_type: inner_type1}, %wrapper{of_type: inner_type2}, schema) do
    covariant?(inner_type1, inner_type2, schema)
  end

  defp covariant?(%{name: name}, %{name: name}, _schema) do
    true
  end

  # Every interface field must exist on the type with a covariant type.
  defp covariant?(%Type.Interface{fields: ifields}, %{fields: type_fields}, schema) do
    Enum.all?(ifields, fn {field_ident, ifield} ->
      case Map.get(type_fields, field_ident) do
        nil ->
          false

        field ->
          covariant?(ifield.type, field.type, schema)
      end
    end)
  end

  defp covariant?(nil, _, _), do: false
  defp covariant?(_, nil, _), do: false

  # Atom identifiers are looked up in the schema before comparison.
  defp covariant?(itype, type, schema) when is_atom(itype) do
    itype = schema.__absinthe_type__(itype)
    covariant?(itype, type, schema)
  end

  defp covariant?(itype, type, schema) when is_atom(type) do
    type = schema.__absinthe_type__(type)
    covariant?(itype, type, schema)
  end
end
defmodule Absinthe.Type.Scalar do
  @moduledoc """
  Represents a primitive value.
  GraphQL responses take the form of a hierarchical tree; the leaves on these
  trees are scalars.
  Also see `Absinthe.Type.Object`.
  ## Built-In Scalars
  The following built-in scalar types are defined:
  * `:boolean` - Represents `true` or `false`. See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
  * `:float` - Represents signed double‐precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
  * `:id` - Represents a unique identifier, often used to refetch an object or as key for a cache. The ID type is serialized in the same way as a String; however, it is not intended to be human‐readable. See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
  * `:integer` - Represents a signed 32‐bit numeric non‐fractional value, greater than or equal to -2^31 and less than 2^31. Note that Absinthe uses the full word `:integer` to identify this type, but its `name` (used by variables, for instance), is `"Int"`. See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
  * `:string` - Represents textual data, represented as UTF‐8 character sequences. The String type is most often used by GraphQL to represent free‐form human‐readable text. See the [GraphQL Specification](https://www.graphql.org/learn/schema/#scalar-types).
  ## Examples
  Supporting a time format in ISOz format, using [Timex](http://hexdocs.pm/timex):
  ```
  scalar :time do
    description "Time (in ISOz format)"
    parse &Timex.DateFormat.parse(&1, "{ISOz}")
    serialize &Timex.DateFormat.format!(&1, "{ISOz}")
  end
  ```
  """
  use Absinthe.Introspection.Kind

  alias Absinthe.Type

  @doc false
  defdelegate functions(), to: Absinthe.Blueprint.Schema.ScalarTypeDefinition

  # Apply the scalar's configured serializer to `value`.
  def serialize(type, value), do: Type.function(type, :serialize).(value)

  # Apply the scalar's configured parser to `value`; arity-2 parsers also
  # receive the execution context.
  def parse(type, value, context \\ %{}) do
    case Type.function(type, :parse) do
      parse_fn when is_function(parse_fn, 1) ->
        parse_fn.(value)

      parse_fn when is_function(parse_fn, 2) ->
        parse_fn.(value, context)
    end
  end

  @typedoc """
  A defined scalar type.
  Note new scalars should be defined using `Absinthe.Schema.Notation.scalar`.
  * `:name` - The name of scalar. Should be a TitleCased `binary`. Set Automatically by `Absinthe.Schema.Notation.scalar`.
  * `:description` - A nice description for introspection.
  * `:serialize` - A function used to convert a value to a form suitable for JSON serialization
  * `:parse` - A function used to convert the raw, incoming form of a scalar to the canonical internal format.
  The `:__private__` and `:__reference__` keys are for internal use.
  """
  @type t :: %__MODULE__{
          name: binary,
          description: binary,
          identifier: atom,
          __private__: Keyword.t(),
          definition: Module.t(),
          __reference__: Type.Reference.t()
        }

  defstruct [
    :name,
    :description,
    :identifier,
    :definition,
    :__reference__,
    :parse,
    :serialize,
    __private__: []
  ]

  @typedoc "The internal, canonical representation of a scalar value"
  @type value_t :: any

  # Debug-only terse Inspect implementation, enabled at compile time.
  if System.get_env("DEBUG_INSPECT") do
    defimpl Inspect do
      def inspect(scalar, _) do
        "#<Scalar:#{scalar.name}>"
      end
    end
  end
end
defmodule Absinthe.Type.BuiltIns.Scalars.Utils do
  @moduledoc false

  # Parse, supporting pulling values out of AST nodes.
  #
  # Builds a 1-arity parse function: given one of the AST node structs whose
  # modules are listed in `node_types`, it coerces the node's `value`; a
  # listed-type mismatch yields `nil`; any non-struct input is passed to
  # `coercion` directly.
  #
  # Fix: the membership test previously compared the node *struct* against
  # the list of node *modules* (`Enum.member?(node_types, node)`), which can
  # never match, so every node fell through to `nil`. It must compare the
  # node's `__struct__` — mirroring the (correct) private `parse_with/2`
  # in `Absinthe.Type.BuiltIns.Scalars`.
  defmacro parse_with(node_types, coercion) do
    quote do
      fn
        %{__struct__: struct, value: value} ->
          if Enum.member?(unquote(node_types), struct) do
            unquote(coercion).(value)
          else
            nil
          end

        other ->
          unquote(coercion).(other)
      end
    end
  end
end
defmodule Absinthe.Type.BuiltIns.Scalars do
  use Absinthe.Schema.Notation

  @moduledoc false

  # Built-in GraphQL scalar types. Each `scalar` block wires a serializer
  # and a parser; parsers receive blueprint input nodes and unwrap their
  # values via the private `parse_with/2` helper at the bottom.
  scalar :integer, name: "Int" do
    description """
    The `Int` scalar type represents non-fractional signed whole numeric values.
    Int can represent values between `-(2^53 - 1)` and `2^53 - 1` since it is
    represented in JSON as double-precision floating point numbers specified
    by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
    """

    serialize & &1
    parse parse_with([Absinthe.Blueprint.Input.Integer], &parse_int/1)
  end

  scalar :float do
    description """
    The `Float` scalar type represents signed double-precision fractional
    values as specified by
    [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
    """

    serialize & &1

    # Integer input nodes are accepted too and widened to floats.
    parse parse_with(
            [Absinthe.Blueprint.Input.Integer, Absinthe.Blueprint.Input.Float],
            &parse_float/1
          )
  end

  scalar :string do
    description """
    The `String` scalar type represents textual data, represented as UTF-8
    character sequences. The String type is most often used by GraphQL to
    represent free-form human-readable text.
    """

    serialize &String.Chars.to_string/1
    parse parse_with([Absinthe.Blueprint.Input.String], &parse_string/1)
  end

  scalar :id, name: "ID" do
    description """
    The `ID` scalar type represents a unique identifier, often used to
    refetch an object or as key for a cache. The ID type appears in a JSON
    response as a String; however, it is not intended to be human-readable.
    When expected as an input type, any string (such as `"4"`) or integer
    (such as `4`) input value will be accepted as an ID.
    """

    serialize &to_string/1

    # Integer IDs are normalized to their string representation on parse.
    parse parse_with(
            [Absinthe.Blueprint.Input.Integer, Absinthe.Blueprint.Input.String],
            &parse_id/1
          )
  end

  scalar :boolean do
    description """
    The `Boolean` scalar type represents `true` or `false`.
    """

    serialize & &1
    parse parse_with([Absinthe.Blueprint.Input.Boolean], &parse_boolean/1)
  end

  # Integers are only safe when between -(2^53 - 1) and 2^53 - 1 due to being
  # encoded in JavaScript and represented in JSON as double-precision floating
  # point numbers, as specified by IEEE 754.
  @max_int 9_007_199_254_740_991
  @min_int -9_007_199_254_740_991

  @spec parse_int(any) :: {:ok, integer} | :error
  defp parse_int(value) when is_integer(value) and value >= @min_int and value <= @max_int do
    {:ok, value}
  end

  defp parse_int(_) do
    :error
  end

  @spec parse_float(any) :: {:ok, float} | :error
  defp parse_float(value) when is_float(value) do
    {:ok, value}
  end

  # Widen integer input to a float.
  defp parse_float(value) when is_integer(value) do
    {:ok, value * 1.0}
  end

  defp parse_float(_) do
    :error
  end

  @spec parse_string(any) :: {:ok, binary} | :error
  defp parse_string(value) when is_binary(value) do
    {:ok, value}
  end

  defp parse_string(_) do
    :error
  end

  @spec parse_id(any) :: {:ok, binary} | :error
  defp parse_id(value) when is_binary(value) do
    {:ok, value}
  end

  defp parse_id(value) when is_integer(value) do
    {:ok, Integer.to_string(value)}
  end

  defp parse_id(_) do
    :error
  end

  @spec parse_boolean(any) :: {:ok, boolean} | :error
  defp parse_boolean(value) when is_boolean(value) do
    {:ok, value}
  end

  defp parse_boolean(_) do
    :error
  end

  # Parse, supporting pulling values out of blueprint Input nodes
  #
  # Returns a 1-arity parser: input nodes of a listed struct type have their
  # `value` coerced; explicit `Null` nodes parse to `{:ok, nil}`; unlisted
  # node types yield `:error`; anything else is coerced directly.
  defp parse_with(node_types, coercion) do
    fn
      %{__struct__: str, value: value} ->
        if Enum.member?(node_types, str) do
          coercion.(value)
        else
          :error
        end

      %Absinthe.Blueprint.Input.Null{} ->
        {:ok, nil}

      other ->
        coercion.(other)
    end
  end
end
defmodule Absinthe.Type.BuiltIns.Directives do
  @moduledoc false
  use Absinthe.Schema.Notation

  alias Absinthe.Blueprint

  # The two spec-mandated executable directives, `@include` and `@skip`.
  # Each `expand` function flags the document node; the flag is read later
  # in the pipeline to decide whether the node is executed.
  directive :include do
    # Fix: removed a stray trailing double-quote that had leaked into this
    # description text (the `:skip` description below had no such quote).
    description """
    Directs the executor to include this field or fragment only when the `if` argument is true.
    """

    arg :if, non_null(:boolean), description: "Included when true."

    on [:field, :fragment_spread, :inline_fragment]

    expand fn
      %{if: true}, node ->
        Blueprint.put_flag(node, :include, __MODULE__)

      _, node ->
        Blueprint.put_flag(node, :skip, __MODULE__)
    end
  end

  directive :skip do
    description """
    Directs the executor to skip this field or fragment when the `if` argument is true.
    """

    arg :if, non_null(:boolean), description: "Skipped when true."

    on [:field, :fragment_spread, :inline_fragment]

    expand fn
      %{if: true}, node ->
        Blueprint.put_flag(node, :skip, __MODULE__)

      _, node ->
        Blueprint.put_flag(node, :include, __MODULE__)
    end
  end
end
defmodule Absinthe.Type.BuiltIns.Introspection do
@moduledoc false
use Absinthe.Schema.Notation
object :__schema do
description "Represents a schema"
field :types, list_of(:__type) do
resolve fn _, %{schema: schema} ->
{:ok, Absinthe.Schema.used_types(schema) ++ Absinthe.Schema.introspection_types(schema)}
end
end
field :query_type,
type: :__type,
resolve: fn _, %{schema: schema} ->
{:ok, Absinthe.Schema.lookup_type(schema, :query)}
end
field :mutation_type,
type: :__type,
resolve: fn _, %{schema: schema} ->
{:ok, Absinthe.Schema.lookup_type(schema, :mutation)}
end
field :subscription_type,
type: :__type,
resolve: fn _, %{schema: schema} ->
{:ok, Absinthe.Schema.lookup_type(schema, :subscription)}
end
field :directives,
type: list_of(:__directive),
resolve: fn _, %{schema: schema} ->
{:ok, Absinthe.Schema.directives(schema)}
end
end
object :__directive do
description "Represents a directive"
field :name, :string
field :description, :string
field :args,
type: list_of(:__inputvalue),
resolve: fn _, %{source: source} ->
structs = source.args |> Map.values()
{:ok, structs}
end
field :on_operation,
deprecate: "Check `locations` field for enum value OPERATION",
type: :boolean,
resolve: fn _, %{source: source} ->
{:ok, Enum.any?(source.locations, &Enum.member?([:query, :mutation, :subscription], &1))}
end
field :on_fragment,
deprecate: "Check `locations` field for enum value FRAGMENT_SPREAD",
type: :boolean,
resolve: fn _, %{source: source} ->
{:ok, Enum.member?(source.locations, :fragment_spread)}
end
field :on_field,
type: :boolean,
deprecate: "Check `locations` field for enum value FIELD",
resolve: fn _, %{source: source} ->
{:ok, Enum.member?(source.locations, :field)}
end
field :locations, list_of(:__directive_location)
end
enum :__directive_location,
values: [
# OPERATIONS
:query,
:mutation,
:subscription,
:field,
:fragment_definition,
:fragment_spread,
:inline_fragment
# TODO: Schema definitions to support Schema input
]
object :__type do
description "Represents scalars, interfaces, object types, unions, enums in the system"
field :kind,
type: :string,
resolve: fn _, %{source: %{__struct__: type}} ->
{:ok, type.kind}
end
field :name, :string
field :description, :string
field :fields, list_of(:__field) do
arg :include_deprecated, :boolean, default_value: false
resolve fn
%{include_deprecated: show_deprecated}, %{source: %{__struct__: str, fields: fields}}
when str in [Absinthe.Type.Object, Absinthe.Type.Interface] ->
result =
fields
|> Enum.flat_map(fn {_, %{deprecation: is_deprecated} = field} ->
if !is_deprecated || (is_deprecated && show_deprecated) do
[field]
else
[]
end
end)
{:ok, result}
_, _ ->
{:ok, nil}
end
end
field :interfaces,
type: list_of(:__type),
resolve: fn
_, %{schema: schema, source: %{interfaces: interfaces}} ->
structs =
interfaces
|> Enum.map(fn ident ->
Absinthe.Schema.lookup_type(schema, ident)
end)
{:ok, structs}
_, _ ->
{:ok, nil}
end
field :possible_types,
type: list_of(:__type),
resolve: fn
_, %{schema: schema, source: %{types: types}} ->
structs = types |> Enum.map(&Absinthe.Schema.lookup_type(schema, &1))
{:ok, structs}
_, %{schema: schema, source: %Absinthe.Type.Interface{identifier: ident}} ->
{:ok, Absinthe.Schema.implementors(schema, ident)}
_, _ ->
{:ok, nil}
end
field :enum_values,
type: list_of(:__enumvalue),
args: [
include_deprecated: [
type: :boolean,
default_value: false
]
],
resolve: fn
%{include_deprecated: show_deprecated}, %{source: %Absinthe.Type.Enum{values: values}} ->
result =
values
|> Enum.flat_map(fn {_, %{deprecation: is_deprecated} = value} ->
if !is_deprecated || (is_deprecated && show_deprecated) do
[value]
else
[]
end
end)
{:ok, result}
_, _ ->
{:ok, nil}
end
field :input_fields,
type: list_of(:__inputvalue),
resolve: fn
_, %{source: %Absinthe.Type.InputObject{fields: fields}} ->
structs = fields |> Map.values()
{:ok, structs}
_, %{source: _} ->
{:ok, nil}
end
field :of_type,
type: :__type,
resolve: fn
_, %{schema: schema, source: %{of_type: type}} ->
{:ok, Absinthe.Schema.lookup_type(schema, type, unwrap: false)}
_, _ ->
{:ok, nil}
end
end
# Introspection type describing a single field of an object/interface.
object :__field do
  field :name,
    type: :string,
    resolve: fn _, %{adapter: adapter, source: source} ->
      # Report the adapter's external naming convention (e.g. camelCase).
      external = adapter.to_external_name(source.name, :field)
      {:ok, external}
    end
  field :description, :string
  field :args,
    type: list_of(:__inputvalue),
    resolve: fn _, %{source: source} ->
      {:ok, source.args |> Map.values()}
    end
  field :type,
    type: :__type,
    resolve: fn _, %{schema: schema, source: source} ->
      # A field's type may be stored as an identifier (atom) needing a schema
      # lookup, or as an already-built wrapper struct used as-is.
      case source.type do
        ident when is_atom(ident) ->
          {:ok, Absinthe.Schema.lookup_type(schema, ident)}
        built ->
          {:ok, built}
      end
    end
  field :is_deprecated,
    type: :boolean,
    resolve: fn
      _, %{source: %{deprecation: nil}} -> {:ok, false}
      _, _ -> {:ok, true}
    end
  field :deprecation_reason,
    type: :string,
    resolve: fn
      _, %{source: %{deprecation: nil}} -> {:ok, nil}
      _, %{source: %{deprecation: deprecation}} -> {:ok, deprecation.reason}
    end
end
# Introspection type for arguments and input-object fields.
object :__inputvalue, name: "__InputValue" do
  field :name,
    type: :string,
    resolve: fn _, %{adapter: adapter, source: source} ->
      # Report the adapter's external naming convention (e.g. camelCase).
      {:ok, adapter.to_external_name(source.name, :field)}
    end
  field :description, :string
  field :type,
    type: :__type,
    resolve: fn _, %{schema: schema, source: %{type: ident}} ->
      # `unwrap: false` keeps NonNull/List wrappers intact for introspection.
      type = Absinthe.Schema.lookup_type(schema, ident, unwrap: false)
      {:ok, type}
    end
  field :default_value,
    type: :string,
    resolve: fn
      _, %{source: %{default_value: nil}} ->
        {:ok, nil}
      _, %{schema: schema, source: %{default_value: value, type: type}} ->
        # Render the default according to the (fully unwrapped) type so the
        # introspected string matches what a query document would contain.
        case Absinthe.Schema.lookup_type(schema, type, unwrap: true) do
          %Absinthe.Type.Enum{values_by_internal_value: values} ->
            # NOTE(review): assumes the default is a known internal enum
            # value; `values[value]` would be nil (and `.name` would raise)
            # otherwise — confirm upstream validation guarantees this.
            {:ok, values[value].name}
          %Absinthe.Type.Scalar{} = type ->
            # Serialize via the scalar's own serializer, then `inspect` so
            # strings are quoted as they would appear in a document.
            {:ok, inspect(Absinthe.Type.function(type, :serialize).(value))}
          _ ->
            {:ok, to_string(value)}
        end
      _, %{source: _} ->
        {:ok, nil}
    end
end
# Introspection type for a single enum value.
object :__enumvalue, name: "__EnumValue" do
  field :name, :string
  field :description, :string
  field :is_deprecated,
    type: :boolean,
    resolve: fn
      _, %{source: %{deprecation: nil}} -> {:ok, false}
      _, _ -> {:ok, true}
    end
  field :deprecation_reason,
    type: :string,
    resolve: fn
      _, %{source: %{deprecation: nil}} -> {:ok, nil}
      _, %{source: %{deprecation: deprecation}} -> {:ok, deprecation.reason}
    end
end
end
defmodule Absinthe.Type.Field do
  @moduledoc """
  Used to define a field.
  Usually these are defined using `Absinthe.Schema.Notation.field/4`
  See the `t` type below for details and examples of how to define a field.
  """
  # Note: the duplicate `alias Absinthe.Type` that previously appeared both
  # before and after the moduledoc has been collapsed into a single alias.
  alias Absinthe.Type
  alias Absinthe.Type.Deprecation
  alias Absinthe.Schema
  use Type.Fetch

  @typedoc """
  A resolver function.
  See the `Absinthe.Type.Field.t` explanation of `:resolve` for more information.
  """
  @type resolver_t :: (%{atom => any}, Absinthe.Resolution.t() -> result)

  @typedoc """
  The result of a resolver.
  """
  @type result :: ok_result | error_result | middleware_result

  @typedoc """
  A complexity function.
  See the `Absinthe.Type.Field/t` explanation of `:complexity` for more
  information.
  """
  @type complexity_t ::
          (%{atom => any}, non_neg_integer -> non_neg_integer)
          | (%{atom => any}, non_neg_integer, Absinthe.Complexity.t() -> non_neg_integer)
          | {module, atom}
          | non_neg_integer

  @type ok_result :: {:ok, any}
  @type error_result :: {:error, error_value}
  @type middleware_result :: {:middleware, Absinthe.Middleware.spec(), term}

  @typedoc """
  An error message is a human-readable string describing the error that occurred.
  """
  @type error_message :: String.t()

  @typedoc """
  Any serializable value.
  """
  @type serializable :: any

  @typedoc """
  A custom error may be a `map` or a `Keyword.t`, but must contain a `:message` key.
  Note that the values that make up a custom error must be serializable.
  """
  @type custom_error ::
          %{required(:message) => error_message, optional(atom) => serializable} | Keyword.t()

  @typedoc """
  An error value is a simple error message, a custom error, or a list of either/both of them.
  """
  @type error_value ::
          error_message | custom_error | [error_message | custom_error] | serializable

  @typedoc """
  The configuration for a field.
  * `:name` - The name of the field, usually assigned automatically by
  the `Absinthe.Schema.Notation.field/1`.
  * `:description` - Description of a field, useful for introspection.
  * `:deprecation` - Deprecation information for a field, usually
  set-up using `Absinthe.Schema.Notation.deprecate/1`.
  * `:type` - The type the value of the field should resolve to
  * `:args` - The arguments of the field, usually created by using `Absinthe.Schema.Notation.arg/2`.
  * `:resolve` - The resolution function. See below for more information.
  * `:complexity` - The complexity function. See below for more information.
  * `:default_value` - The default value of a field. Note this is not used during resolution; only fields that are part of an `Absinthe.Type.InputObject` should set this value.
  ## Resolution Functions
  ### Default
  If no resolution function is given, the default resolution function is used,
  which is roughly equivalent to this:
      {:ok, Map.get(parent_object, field_name)}
  This is commonly used when listing the available fields on a
  `Absinthe.Type.Object` that models a data record. For instance:
  ```
  object :person do
    description "A person"
    field :first_name, :string
    field :last_name, :string
  end
  ```
  ### Custom Resolution
  When accepting arguments, however, you probably need to use them for
  something. Here's an example of defining a field that looks up a list of
  users for a given `location_id`:
  ```
  query do
    field :users, :person do
      arg :location_id, non_null(:id)
      resolve fn %{location_id: id}, _ ->
        {:ok, MyApp.users_for_location_id(id)}
      end
    end
  end
  ```
  Custom resolution functions are passed two arguments:
  1. A map of the arguments for the field, filled in with values from the
  provided query document/variables.
  2. An `Absinthe.Resolution` struct, containing the execution environment
  for the field (and useful for complex resolutions using the resolved source
  object, etc)
  ## Complexity function
  ### Default
  If no complexity function is given, the default complexity function is used,
  which is equivalent to:
      fn(_, child_complexity) -> 1 + child_complexity end
  ### Custom Complexity
  When accepting arguments, however, you probably need to use them for
  something. Here's an example of defining a field that looks up at most
  `limit` users:
  ```
  query do
    field :users, :person do
      arg :limit, :integer
      complexity fn %{limit: limit}, child_complexity ->
        10 + limit * child_complexity
      end
    end
  end
  ```
  An optional third argument, `Absinthe.Complexity` struct, provides extra
  information. Here's an example of changing the complexity using the context:
  ```
  query do
    field :users, :person do
      arg :limit, :integer
      complexity fn _, child_complexity, %{context: %{admin: admin?}} ->
        if admin?, do: 0, else: 10 + limit * child_complexity
      end
    end
  end
  ```
  Custom complexity functions are passed two or three arguments:
  1. A map of the arguments for the field, filled in with values from the
  provided query document/variables.
  2. A non negative integer, which is total complexity of the child fields.
  3. An `Absinthe.Complexity` struct with information about the context of the
  field. This argument is optional when using an anonymous function.
  Alternatively complexity can be an integer greater than or equal to 0:
  ```
  query do
    field :users, :person do
      complexity 10
    end
  end
  ```
  """
  @type t :: %__MODULE__{
          identifier: atom,
          name: binary,
          description: binary | nil,
          type: Type.identifier_t(),
          deprecation: Deprecation.t() | nil,
          default_value: any,
          args: %{(binary | atom) => Absinthe.Type.Argument.t()} | nil,
          middleware: [],
          complexity: complexity_t | nil,
          __private__: Keyword.t(),
          # `module`, not `Module.t()` — `Module` exports no `t/0` type.
          definition: module,
          __reference__: Type.Reference.t()
        }

  defstruct identifier: nil,
            name: nil,
            description: nil,
            type: nil,
            deprecation: nil,
            args: %{},
            # used by subscription fields
            config: nil,
            # used by mutation fields
            triggers: [],
            middleware: [],
            complexity: nil,
            default_value: nil,
            __private__: [],
            definition: nil,
            __reference__: nil

  @doc false
  defdelegate functions, to: Absinthe.Blueprint.Schema.FieldDefinition

  defimpl Absinthe.Traversal.Node do
    # A field's traversal children are its resolved type followed by its
    # argument definitions.
    def children(node, traversal) do
      found = Schema.lookup_type(traversal.context, node.type)

      if found do
        [found | node.args |> Map.values()]
      else
        type_names = traversal.context.types.by_identifier |> Map.keys() |> Enum.join(", ")

        raise "Unknown Absinthe type for field `#{node.name}': (#{node.type |> Type.unwrap()} not in available types, #{
                type_names
              })"
      end
    end
  end
end
defmodule Absinthe.Type.Argument do
  @moduledoc """
  Used to define an argument.
  Usually these are defined using `Absinthe.Schema.Notation.arg/2`
  """
  alias Absinthe.Type
  use Type.Fetch

  @typedoc """
  Argument configuration
  * `:name` - The name of the argument, usually assigned automatically using `Absinthe.Schema.Notation.arg/2`.
  * `:type` - The type values the argument accepts/will coerce to.
  * `:deprecation` - Deprecation information for an argument, usually
  set-up using `Absinthe.Schema.Notation.deprecate/1`.
  * `:description` - Description of an argument, useful for introspection.
  """
  @type t :: %__MODULE__{
          # `identifier` was previously missing from this type even though it
          # is part of the struct.
          identifier: atom,
          name: binary,
          type: Type.identifier_t(),
          default_value: any,
          deprecation: Type.Deprecation.t() | nil,
          description: binary | nil,
          # `module`, not `Module.t()` — `Module` exports no `t/0` type.
          definition: module,
          __reference__: Type.Reference.t()
        }

  defstruct identifier: nil,
            name: nil,
            description: nil,
            type: nil,
            deprecation: nil,
            default_value: nil,
            definition: nil,
            __reference__: nil

  defimpl Absinthe.Traversal.Node do
    # An argument's only traversal child is its type reference.
    def children(node, _traversal) do
      [node.type]
    end
  end
end
defmodule Absinthe.Type.Fetch do
  @moduledoc false
  # Injects a `fetch/2` (as used by the `Access` behaviour) into the using
  # module: returns `{:ok, value}` when `key` is present in `container`,
  # `:error` otherwise. `Map.fetch/2` has exactly these semantics, including
  # raising `BadMapError` for non-map containers.
  defmacro __using__(_) do
    quote do
      def fetch(container, key) do
        Map.fetch(container, key)
      end
    end
  end
end
defmodule Absinthe.Type.Enum.Value do
  @moduledoc """
  A possible value for an enum.
  See `Absinthe.Type.Enum` and `Absinthe.Schema.Notation.value/1`.
  """
  alias Absinthe.Type

  @typedoc """
  A defined enum value entry, normally built with
  `Absinthe.Schema.Notation.value/2` as part of a schema.
  * `:name` - The name of the value; this is also the external value that
  query documents provide.
  * `:description` - A nice description for introspection.
  * `:value` - The raw, internal value that `:name` maps to; this is what
  `resolve` functions receive as the argument value.
  * `:deprecation` - Deprecation information for a value, usually set up via
  the `Absinthe.Schema.Notation.deprecate/2` convenience function.
  """
  @type t :: %{
          name: binary,
          description: binary,
          value: any,
          deprecation: Type.Deprecation.t() | nil,
          __reference__: Type.Reference.t()
        }

  # List form: every field defaults to nil, exactly as before.
  defstruct [:name, :description, :value, :deprecation, :__reference__]
end
defmodule Absinthe.Type.List do
  @moduledoc """
  A wrapping type which declares the type of each item in the list.
  ## Examples
  Given a type, `:item`, the type of a field/argument can be declared as a
  list of `:item`-typed values with:
  ```
  type: %Absinthe.Type.List{of_type: :item}
  ```
  Normally, however, this is done via `Absinthe.Schema.Notation.list_of/1`:
  ```
  type: list_of(:item)
  ```
  """
  use Absinthe.Introspection.Kind
  use Absinthe.Type.Fetch

  @typedoc """
  A defined list type.
  ## Options
  * `:of_type` - The underlying, wrapped type.
  """
  @type t :: %__MODULE__{of_type: Absinthe.Type.t()}

  # Single field, defaulting to nil.
  defstruct [:of_type]
end
defmodule Absinthe.Type.Object do
  @moduledoc """
  Represents a non-leaf node in a GraphQL tree of information.
  Objects represent a list of named fields, each of which yield a value of a
  specific type. Object values are serialized as unordered maps, where the
  queried field names (or aliases) are the keys and the result of evaluating the
  field is the value.
  Also see `Absinthe.Type.Scalar`.
  ## Examples
  Given a type defined as the following (see `Absinthe.Schema.Notation.object/3`):
  ```
  @desc "A person"
  object :person do
    field :name, :string
    field :age, :integer
    field :best_friend, :person
    field :pets, list_of(:pet)
  end
  ```
  The "Person" type (referred inside Absinthe as `:person`) is an object, with
  fields that use `Absinthe.Type.Scalar` types (namely `:name` and `:age`), and
  other `Absinthe.Type.Object` types (`:best_friend` and `:pets`, assuming
  `:pet` is an object).
  Given we have a query that supports getting a person by name
  (see `Absinthe.Schema`), and a query document like the following:
  ```
  {
    person(name: "Joe") {
      name
      best_friend {
        name
        age
      }
      pets {
        breed
      }
    }
  }
  ```
  We could get a result like this:
  ```
  %{
    data: %{
      "person" => %{
        "best_friend" => %{
          "name" => "Jill",
          "age" => 29
        },
        "pets" => [
          %{"breed" => "Wyvern"},
          %{"breed" => "Royal Griffon"}
        ]
      }
    }
  }
  ```
  """
  alias Absinthe.Type
  use Absinthe.Introspection.Kind

  @typedoc """
  A defined object type.
  Note new object types (with the exception of the root-level `query`, `mutation`, and `subscription`)
  should be defined using `Absinthe.Schema.Notation.object/3`.
  * `:name` - The name of the object type. Should be a TitleCased `binary`. Set automatically.
  * `:description` - A nice description for introspection.
  * `:fields` - A map of `Absinthe.Type.Field` structs. Usually built via `Absinthe.Schema.Notation.field/1`.
  * `:interfaces` - A list of interfaces that this type guarantees to implement. See `Absinthe.Type.Interface`.
  * `:is_type_of` - A function used to identify whether a resolved object belongs to this defined type. For use with `:interfaces` entry and `Absinthe.Type.Interface`.
  The `__private__` and `:__reference__` keys are for internal use.
  """
  @type t :: %__MODULE__{
          identifier: atom,
          name: binary,
          description: binary,
          fields: map,
          interfaces: [Absinthe.Type.Interface.t()],
          __private__: Keyword.t(),
          # `module`, not `Module.t()` — `Module` exports no `t/0` type.
          definition: module,
          __reference__: Type.Reference.t()
        }

  defstruct identifier: nil,
            name: nil,
            description: nil,
            fields: nil,
            interfaces: [],
            __private__: [],
            definition: nil,
            __reference__: nil,
            is_type_of: nil

  @doc false
  defdelegate functions, to: Absinthe.Blueprint.Schema.ObjectTypeDefinition

  # Looks up a single field definition on an object type by its identifier;
  # returns nil when the field is not defined.
  @doc false
  @spec field(t, atom) :: Absinthe.Type.Field.t()
  def field(%{fields: fields}, identifier) do
    fields
    |> Map.get(identifier)
  end

  defimpl Absinthe.Traversal.Node do
    # An object's traversal children are its field definitions plus the
    # interfaces it implements.
    def children(node, _traversal) do
      Map.values(node.fields) ++ node.interfaces
    end
  end
end
defmodule Absinthe.Type.Custom do
  use Absinthe.Schema.Notation
  @moduledoc """
  This module contains the following additional data types:
  - datetime (UTC)
  - naive_datetime
  - date
  - time
  - decimal (only if [Decimal](https://hex.pm/packages/decimal) is available)
  Further description of these types can be found in the source code.
  To use: `import_types Absinthe.Type.Custom`.
  """
  scalar :datetime, name: "DateTime" do
    description """
    The `DateTime` scalar type represents a date and time in the UTC
    timezone. The DateTime appears in a JSON response as an ISO8601 formatted
    string, including UTC timezone ("Z"). The parsed date and time string will
    be converted to UTC and any UTC offset other than 0 will be rejected.
    """
    serialize &DateTime.to_iso8601/1
    parse &parse_datetime/1
  end
  scalar :naive_datetime, name: "NaiveDateTime" do
    description """
    The `Naive DateTime` scalar type represents a naive date and time without
    timezone. The DateTime appears in a JSON response as an ISO8601 formatted
    string.
    """
    serialize &NaiveDateTime.to_iso8601/1
    parse &parse_naive_datetime/1
  end
  scalar :date do
    description """
    The `Date` scalar type represents a date. The Date appears in a JSON
    response as an ISO8601 formatted string.
    """
    serialize &Date.to_iso8601/1
    parse &parse_date/1
  end
  scalar :time do
    description """
    The `Time` scalar type represents a time. The Time appears in a JSON
    response as an ISO8601 formatted string.
    """
    serialize &Time.to_iso8601/1
    parse &parse_time/1
  end
  # The :decimal scalar is only compiled when the optional Decimal dependency
  # is loadable at compile time.
  if Code.ensure_loaded?(Decimal) do
    scalar :decimal do
      description """
      The `Decimal` scalar type represents signed double-precision fractional
      values parsed by the `Decimal` library. The Decimal appears in a JSON
      response as a string to preserve precision.
      """
      serialize &Absinthe.Type.Custom.Decimal.serialize/1
      parse &Absinthe.Type.Custom.Decimal.parse/1
    end
  end

  # Parses an ISO8601 datetime input string; only offset 0 (UTC) is accepted —
  # any other offset is rejected as :error.
  @spec parse_datetime(Absinthe.Blueprint.Input.String.t()) :: {:ok, DateTime.t()} | :error
  @spec parse_datetime(Absinthe.Blueprint.Input.Null.t()) :: {:ok, nil}
  defp parse_datetime(%Absinthe.Blueprint.Input.String{value: value}) do
    case DateTime.from_iso8601(value) do
      {:ok, datetime, 0} -> {:ok, datetime}
      {:ok, _datetime, _offset} -> :error
      _error -> :error
    end
  end
  # Explicit GraphQL null parses to nil.
  defp parse_datetime(%Absinthe.Blueprint.Input.Null{}) do
    {:ok, nil}
  end
  # Any other input node kind (int, float, enum, ...) is invalid.
  defp parse_datetime(_) do
    :error
  end

  # Parses an ISO8601 datetime with no timezone component.
  @spec parse_naive_datetime(Absinthe.Blueprint.Input.String.t()) ::
          {:ok, NaiveDateTime.t()} | :error
  @spec parse_naive_datetime(Absinthe.Blueprint.Input.Null.t()) :: {:ok, nil}
  defp parse_naive_datetime(%Absinthe.Blueprint.Input.String{value: value}) do
    case NaiveDateTime.from_iso8601(value) do
      {:ok, naive_datetime} -> {:ok, naive_datetime}
      _error -> :error
    end
  end
  defp parse_naive_datetime(%Absinthe.Blueprint.Input.Null{}) do
    {:ok, nil}
  end
  defp parse_naive_datetime(_) do
    :error
  end

  # Parses an ISO8601 date-only input string.
  @spec parse_date(Absinthe.Blueprint.Input.String.t()) :: {:ok, Date.t()} | :error
  @spec parse_date(Absinthe.Blueprint.Input.Null.t()) :: {:ok, nil}
  defp parse_date(%Absinthe.Blueprint.Input.String{value: value}) do
    case Date.from_iso8601(value) do
      {:ok, date} -> {:ok, date}
      _error -> :error
    end
  end
  defp parse_date(%Absinthe.Blueprint.Input.Null{}) do
    {:ok, nil}
  end
  defp parse_date(_) do
    :error
  end

  # Parses an ISO8601 time-only input string.
  @spec parse_time(Absinthe.Blueprint.Input.String.t()) :: {:ok, Time.t()} | :error
  @spec parse_time(Absinthe.Blueprint.Input.Null.t()) :: {:ok, nil}
  defp parse_time(%Absinthe.Blueprint.Input.String{value: value}) do
    case Time.from_iso8601(value) do
      {:ok, time} -> {:ok, time}
      _error -> :error
    end
  end
  defp parse_time(%Absinthe.Blueprint.Input.Null{}) do
    {:ok, nil}
  end
  defp parse_time(_) do
    :error
  end
end
defmodule Absinthe.Type.InputObject do
  @moduledoc """
  Defines a GraphQL input object
  Input objects enable nested arguments to queries and mutations.
  ## Example
  ```
  mutation do
    field :user, :user do
      arg :name, :string
      arg :contact, non_null(:contact)
      resolve &User.create/2
    end
  end
  input_object :contact do
    field :email, :string
  end
  ```
  This supports the following `mutation`:
  ```graphql
  mutation CreateUser {
    user(contact: {email: "[email protected]"}) {
      id
    }
  }
  ```
  """
  use Absinthe.Introspection.Kind
  use Absinthe.Type.Fetch
  alias Absinthe.Type

  @typedoc """
  Note new input object types should be defined using
  `Absinthe.Schema.Notation.input_object/3`.
  * `:name` - The name of the input object type. Should be a TitleCased `binary`. Set automatically.
  * `:description` - A nice description for introspection.
  * `:fields` - A map of `Absinthe.Type.Field` structs. Usually built via `Absinthe.Schema.Notation.field/1`.
  The `__private__` and `:__reference__` fields are for internal use.
  """
  @type t :: %__MODULE__{
          name: binary,
          description: binary,
          fields: map,
          identifier: atom,
          __private__: Keyword.t(),
          # `module`, not `Module.t()` — `Module` exports no `t/0` type.
          definition: module,
          __reference__: Type.Reference.t()
        }

  defstruct name: nil,
            description: nil,
            fields: %{},
            identifier: nil,
            __private__: [],
            definition: nil,
            __reference__: nil

  defimpl Absinthe.Traversal.Node do
    # An input object's traversal children are its field definitions.
    def children(node, _traversal) do
      Map.values(node.fields)
    end
  end
end
defmodule Absinthe.Type.Enum do
  @moduledoc """
  Used to define an enum type, a special scalar that can only have a defined set
  of values.
  See the `t` type below for details and examples.
  ## Examples
  Given a type defined as the following (see `Absinthe.Schema.Notation`):
  ```
  @desc "The selected color channel"
  enum :color_channel do
    value :red, as: :r, description: "Color Red"
    value :green, as: :g, description: "Color Green"
    value :blue, as: :b, description: "Color Blue"
    value :alpha, as: :a, deprecate: "We no longer support opacity settings", description: "Alpha Channel"
  end
  ```
  The "ColorChannel" type (referred inside Absinthe as `:color_channel`) is an
  Enum type, with values with names "red", "green", "blue", and "alpha" that map
  to internal, raw values `:r`, `:g`, `:b`, and `:a`. The alpha color channel
  is deprecated, just as fields and arguments can be.
  You can omit the raw `value` if you'd like it to be the same as the
  identifier. For instance, in this example the `value` is automatically set to
  `:red`:
  ```
  enum :color_channel do
    description "The selected color channel"
    value :red, description: "Color Red"
    value :green, description: "Color Green"
    value :blue, description: "Color Blue"
    value :alpha, deprecate: "We no longer support opacity settings", description: "Alpha Channel"
  end
  ```
  If you really want to use a shorthand, skipping support for descriptions,
  custom raw values, and deprecation, you can just provide a list of atoms:
  ```
  enum :color_channel, values: [:red, :green, :blue, :alpha]
  ```
  Keep in mind that writing a terse definition that skips descriptions and
  deprecations today may hamper tooling that relies on introspection tomorrow.
  """
  use Absinthe.Introspection.Kind
  alias Absinthe.{Blueprint, Type}

  @typedoc """
  A defined enum type.
  Should be defined using `Absinthe.Schema.Notation.enum/2`.
  * `:name` - The name of the enum type. Should be a TitleCased `binary`. Set automatically.
  * `:description` - A nice description for introspection.
  * `:values` - The enum values, usually provided using the `Absinthe.Schema.Notation.values/1` or `Absinthe.Schema.Notation.value/1` macro.
  The `__private__` and `:__reference__` fields are for internal use.
  """
  @type t :: %__MODULE__{
          name: binary,
          description: binary,
          values: %{binary => Type.Enum.Value.t()},
          identifier: atom,
          __private__: Keyword.t(),
          # `module`, not `Module.t()` — `Module` exports no `t/0` type.
          definition: module,
          __reference__: Type.Reference.t()
        }

  defstruct name: nil,
            description: nil,
            identifier: nil,
            values: %{},
            values_by_internal_value: %{},
            values_by_name: %{},
            __private__: [],
            definition: nil,
            __reference__: nil

  # Get the internal representation of an enum value
  @doc false
  @spec parse(t, any) :: any
  def parse(enum, %Blueprint.Input.Enum{value: external_value}) do
    # Returns {:ok, %Type.Enum.Value{}} or :error for unknown names.
    Map.fetch(enum.values_by_name, external_value)
  end

  def parse(_, _) do
    :error
  end

  # Get the external representation of an enum value
  @doc false
  @spec serialize(t, any) :: binary
  def serialize(enum, internal_value) do
    # Raises KeyError if the internal value is not part of the enum.
    Map.fetch!(enum.values_by_internal_value, internal_value).name
  end
end
defmodule Absinthe.Type.NonNull do
  @moduledoc """
  A type that wraps an underlying type, acting identically to that type but
  adding a non-null constraint.
  By default, all types in GraphQL are nullable. To declare a type that
  disallows null, wrap it in a `Absinthe.Type.NonNull` struct.
  ## Examples
  Given a type, `:item`, it can be declared non-null directly:
  ```
  type: %Absinthe.Type.NonNull{of_type: :item}
  ```
  Normally, however, this is done via `Absinthe.Schema.Notation.non_null/1`:
  ```
  type: non_null(:item)
  ```
  """
  use Absinthe.Introspection.Kind
  use Absinthe.Type.Fetch

  @typedoc """
  A defined non-null type.
  ## Options
  * `:of_type` -- the underlying type to wrap
  """
  # Single field, defaulting to nil. (Kept ahead of the struct typespecs, as
  # in the original ordering.)
  defstruct [:of_type]
  @type t :: %__MODULE__{of_type: Absinthe.Type.nullable_t()}
  @type t(x) :: %__MODULE__{of_type: x}
end
defmodule Absinthe.Type.Union do
  @moduledoc """
  A unions is an abstract type made up of multiple possible concrete types.
  No common fields are declared in a union. Compare to `Absinthe.Type.Interface`.
  Because it's necessary for the union to determine the concrete type of a
  resolved object, you must either:
  * Provide a `:resolve_type` function on the union
  * Provide a `:is_type_of` function on each possible concrete type
  ```
  union :search_result do
    description "A search result"
    types [:person, :business]
    resolve_type fn
      %Person{}, _ -> :person
      %Business{}, _ -> :business
    end
  end
  ```
  """
  use Absinthe.Introspection.Kind
  alias Absinthe.{Schema, Type}

  @typedoc """
  * `:name` - The name of the union type. Should be a TitleCased `binary`. Set automatically.
  * `:description` - A nice description for introspection.
  * `:types` - The list of possible types.
  * `:resolve_type` - A function used to determine the concrete type of a resolved object. See also `Absinthe.Type.Object`'s `:is_type_of`. Either `resolve_type` is specified in the union type, or every object type in the union must specify `is_type_of`
  The `:resolve_type` function will be passed two arguments; the object whose type needs to be identified, and the `Absinthe.Execution` struct providing the full execution context.
  The `__private__` and `:__reference__` keys are for internal use.
  """
  @type t :: %__MODULE__{
          name: binary,
          description: binary,
          types: [Type.identifier_t()],
          identifier: atom,
          __private__: Keyword.t(),
          # `module`, not `Module.t()` — `Module` exports no `t/0` type.
          definition: module,
          __reference__: Type.Reference.t()
        }

  defstruct name: nil,
            description: nil,
            identifier: nil,
            resolve_type: nil,
            types: [],
            __private__: [],
            definition: nil,
            __reference__: nil

  @doc false
  defdelegate functions, to: Absinthe.Blueprint.Schema.UnionTypeDefinition

  # Whether the given type (by its reference identifier) is a member of this
  # union.
  @doc false
  @spec member?(t, Type.t()) :: boolean
  def member?(%{types: types}, %{__reference__: %{identifier: ident}}) do
    ident in types
  end

  def member?(_, _) do
    false
  end

  # Determines the concrete type for a resolved object, either via the
  # union's own :resolve_type function or by probing each member type's
  # :is_type_of. With lookup: true (the default) the full type struct is
  # returned; otherwise just the identifier.
  @doc false
  @spec resolve_type(t, any, Absinthe.Resolution.t()) :: Type.t() | nil
  def resolve_type(type, object, env, opts \\ [lookup: true])

  def resolve_type(%{types: types} = union, obj, %{schema: schema} = env, opts) do
    if resolver = Type.function(union, :resolve_type) do
      case resolver.(obj, env) do
        nil ->
          nil

        ident when is_atom(ident) ->
          if opts[:lookup] do
            Absinthe.Schema.lookup_type(schema, ident)
          else
            ident
          end
      end
    else
      # No union-level resolver: find the first member whose is_type_of
      # accepts the object.
      type_name =
        Enum.find(types, fn
          # NOTE(review): `types` holds identifiers (atoms) per the typespec,
          # making this map clause look unreachable; kept to preserve
          # behavior in case member structs are ever passed — confirm.
          %{is_type_of: nil} ->
            false

          type ->
            type = Absinthe.Schema.lookup_type(schema, type)
            Absinthe.Type.function(type, :is_type_of).(obj)
        end)

      if opts[:lookup] do
        Schema.lookup_type(schema, type_name)
      else
        type_name
      end
    end
  end
end
# Compiled only when the optional Decimal dependency is present; otherwise a
# stub module is compiled so callers always have something to reference.
if Code.ensure_loaded?(Decimal) do
  defmodule Absinthe.Type.Custom.Decimal do
    @moduledoc false
    # Serialization renders the decimal as a string to preserve precision in
    # JSON responses.
    defdelegate serialize(value), to: Decimal, as: :to_string
    @spec parse(any) :: {:ok, Decimal.t()} | :error
    @spec parse(Absinthe.Blueprint.Input.Null.t()) :: {:ok, nil}
    # NOTE(review): written against Decimal 1.x, where `Decimal.parse/1`
    # returns `{:ok, decimal}` and `Decimal.new/1` accepts floats — confirm
    # these calls before upgrading the Decimal dependency.
    def parse(%Absinthe.Blueprint.Input.String{value: value}) do
      case Decimal.parse(value) do
        {:ok, decimal} -> {:ok, decimal}
        _ -> :error
      end
    end
    def parse(%Absinthe.Blueprint.Input.Float{value: value}) do
      decimal = Decimal.new(value)
      # Reject NaN results rather than passing them through.
      if Decimal.nan?(decimal), do: :error, else: {:ok, decimal}
    end
    def parse(%Absinthe.Blueprint.Input.Integer{value: value}) do
      decimal = Decimal.new(value)
      if Decimal.nan?(decimal), do: :error, else: {:ok, decimal}
    end
    # Explicit GraphQL null parses to nil.
    def parse(%Absinthe.Blueprint.Input.Null{}) do
      {:ok, nil}
    end
    # Any other input node kind is invalid.
    def parse(_) do
      :error
    end
  end
else
  defmodule Absinthe.Type.Custom.Decimal do
    @moduledoc false
    # Stub used when Decimal is unavailable: parsing always fails and
    # serialization yields nil.
    @spec parse(any) :: :error
    def parse(_), do: :error
    @spec serialize(any) :: nil
    def serialize(_), do: nil
  end
end
defmodule Absinthe.Type.Reference do
  @moduledoc false
  # Records where a type was defined: the defining module plus the type's
  # identifier and canonical name.
  @typedoc false
  @type t :: %__MODULE__{module: atom, identifier: atom, name: binary}
  # List form: every field defaults to nil, exactly as before.
  defstruct [:module, :identifier, :name]
end
defmodule Absinthe.Type.Directive do
  @moduledoc """
  Used by the GraphQL runtime as a way of modifying execution
  behavior.
  Type system creators will usually not create these directly.
  """
  alias Absinthe.Type
  alias Absinthe.Language
  use Absinthe.Introspection.Kind

  @typedoc """
  A defined directive.
  * `:name` - The name of the directive. Should be a lowercase `binary`. Set automatically.
  * `:description` - A nice description for introspection.
  * `:args` - A map of `Absinthe.Type.Argument` structs. See `Absinthe.Schema.Notation.arg/1`.
  * `:locations` - A list of places the directives can be used.
  * `:instruction` - A function that, given an argument, returns an instruction for the correct action to take
  The `:__reference__` key is for internal use.
  """
  @type t :: %{
          name: binary,
          description: binary,
          identifier: atom,
          args: map,
          locations: [location],
          instruction: (map -> atom),
          # `module`, not `Module.t()` — `Module` exports no `t/0` type.
          definition: module,
          __private__: Keyword.t(),
          __reference__: Type.Reference.t()
        }

  @type location ::
          :query | :mutation | :field | :fragment_definition | :fragment_spread | :inline_fragment

  defstruct name: nil,
            description: nil,
            identifier: nil,
            args: nil,
            locations: [],
            expand: nil,
            instruction: nil,
            definition: nil,
            __private__: [],
            __reference__: nil

  @doc false
  defdelegate functions, to: Absinthe.Blueprint.Schema.DirectiveDefinition

  # Whether the directive is active in `place`
  @doc false
  @spec on?(t, Language.t()) :: boolean
  def on?(%{locations: locations}, place) do
    Enum.any?(locations, &do_on?(&1, place))
  end

  # Operations: :query/:mutation/:subscription match the operation definition
  # with the same operation atom (note the repeated `location` binding).
  defp do_on?(location, %Language.OperationDefinition{operation: location}), do: true
  defp do_on?(:field, %Language.Field{}), do: true
  defp do_on?(:fragment_definition, %Language.Fragment{}), do: true
  defp do_on?(:fragment_spread, %Language.FragmentSpread{}), do: true
  defp do_on?(:inline_fragment, %Language.InlineFragment{}), do: true
  # TODO: Schema definitions to support Schema input
  defp do_on?(_, _), do: false

  # Check a directive and return an instruction
  @doc false
  @spec check(t, Language.t(), map) :: atom
  def check(definition, place, args) do
    if on?(definition, place) && definition.instruction do
      definition.instruction.(args)
    else
      :ok
    end
  end
end
defmodule Absinthe.Resolution.Helpers do
@moduledoc """
Handy functions for returning async or batched resolution functions
Using `Absinthe.Schema.Notation` or (by extension) `Absinthe.Schema` will
automatically import the `batch` and `async` helpers. Dataloader helpers
require an explicit `import Absinthe.Resolution.Helpers` invocation, since
dataloader is an optional dependency.
"""
alias Absinthe.Middleware
@doc """
Execute resolution field asynchronously.
This is a helper function for using the `Absinthe.Middleware.Async`.
Forbidden in mutation fields. (TODO: actually enforce this)
"""
@spec async((() -> term)) :: {:middleware, Middleware.Async, term}
@spec async((() -> term), Keyword.t()) :: {:middleware, Middleware.Async, term}
# Packages `fun` and its options into an Async middleware instruction; the
# actual task is started by `Absinthe.Middleware.Async`.
def async(fun, opts \\ []), do: {:middleware, Middleware.Async, {fun, opts}}
@doc """
Batch the resolution of several functions together.
Helper function for creating `Absinthe.Middleware.Batch`
# Example
Raw usage:
```elixir
object :post do
field :name, :string
field :author, :user do
resolve fn post, _, _ ->
batch({__MODULE__, :users_by_id}, post.author_id, fn batch_results ->
{:ok, Map.get(batch_results, post.author_id)}
end)
end
end
end
def users_by_id(_, user_ids) do
users = Repo.all from u in User, where: u.id in ^user_ids
Map.new(users, fn user -> {user.id, user} end)
end
```
"""
@spec batch(Middleware.Batch.batch_fun(), term, Middleware.Batch.post_batch_fun()) ::
{:plugin, Middleware.Batch, term}
@spec batch(
Middleware.Batch.batch_fun(),
term,
Middleware.Batch.post_batch_fun(),
opts :: Keyword.t()
) :: {:plugin, Middleware.Batch, term}
# Packages the batch function, this field's datum, the post-batch callback,
# and options into a Batch middleware instruction.
def batch(batch_fun, batch_data, post_batch_fun, opts \\ []) do
  {:middleware, Middleware.Batch, {batch_fun, batch_data, post_batch_fun, opts}}
end
if Code.ensure_loaded?(Dataloader) do
@doc """
Dataloader helper function
This function is not imported by default. To make it available in your module do
```
import Absinthe.Resolution.Helpers
```
This function helps you use data loader in a direct way within your schema.
While normally the `dataloader/1,2,3` helpers are enough, `on_load/2` is useful
when you want to load multiple things in a single resolver, or when you need
fine grained control over the dataloader cache.
## Examples
```elixir
field :reports, list_of(:report) do
resolve fn shipment, _, %{context: %{loader: loader}} ->
loader
|> Dataloader.load(SourceName, :automatic_reports, shipment)
|> Dataloader.load(SourceName, :manual_reports, shipment)
|> on_load(fn loader ->
reports =
loader
|> Dataloader.get(SourceName, :automatic_reports, shipment)
|> Enum.concat(Dataloader.load(loader, SourceName, :manual_reports, shipment))
|> Enum.sort_by(&reported_at/1)
{:ok, reports}
end)
end
end
```
"""
def on_load(loader, fun) do
  # Defer to the Dataloader middleware, which invokes `fun` once `loader`'s
  # pending batches have run.
  instruction = {loader, fun}
  {:middleware, Absinthe.Middleware.Dataloader, instruction}
end
@type dataloader_tuple :: {:middleware, Absinthe.Middleware.Dataloader, term}
@type dataloader_key_fun ::
(Absinthe.Resolution.source(),
Absinthe.Resolution.arguments(),
Absinthe.Resolution.t() ->
{any, map})
@type dataloader_opt :: {:args, map} | {:use_parent, true | false}
@doc """
Resolve a field with a dataloader source.
This function is not imported by default. To make it available in your module do
```
import Absinthe.Resolution.Helpers
```
Same as `dataloader/3`, but it infers the resource name from the field name.
## Examples
```
field :author, :user, resolve: dataloader(Blog)
```
This is identical to doing the following.
```
field :author, :user, resolve: dataloader(Blog, :author, [])
```
"""
@spec dataloader(Dataloader.source_name()) :: dataloader_tuple
def dataloader(source) do
fn parent, args, %{context: %{loader: loader}} = res ->
resource = res.definition.schema_node.identifier
do_dataloader(loader, source, resource, args, parent, [])
end
end
@doc """
Resolve a field with Dataloader
This function is not imported by default. To make it available in your module do
```
import Absinthe.Resolution.Helpers
```
While `on_load/2` makes using dataloader directly easy within a resolver function,
it is often unnecessary to need this level of direct control.
The `dataloader/3` function exists to provide a simple API for using dataloader.
It takes the name of a data source, the name of the resource you want to load,
and then a variety of options.
## Basic Usage
```
object :user do
field :posts, list_of(:post),
resolve: dataloader(Blog, :posts, args: %{deleted: false})
field :organization, :organization do
resolve dataloader(Accounts, :organization, use_parent: false)
end
field(:account_active, non_null(:boolean), resolve: dataloader(
Accounts, :account, callback: fn account, _parent, _args ->
{:ok, account.active}
end
)
)
end
```
## Key Functions
Instead of passing in a literal like `:posts` or `:organization` in as the resource,
it is also possible pass in a function:
```
object :user do
field :posts, list_of(:post) do
arg :limit, non_null(:integer)
resolve dataloader(Blog, fn user, args, info ->
args = Map.update!(args, :limit, fn val ->
max(min(val, 20), 0)
end)
{:posts, args}
end)
end
end
```
In this case we want to make sure that the limit value cannot be larger than
`20`. By passing a callback function to `dataloader/2` we can ensure that
the value will fall nicely between 0 and 20.
## Options
- `:args` default: `%{}`. Any arguments you want to always pass into the
`Dataloader.load/4` call. Resolver arguments are merged into this value and,
in the event of a conflict, the resolver arguments win.
- `:callback` default: `default_callback/3`. Callback that is run with result
of dataloader. It receives the result as the first argument, and the parent
and args as second and third. Can be used to e.g. compute fields on the return
value of the loader. Should return an ok or error tuple.
- `:use_parent` default: `true`. This option affects whether or not the `dataloader/2`
helper will use any pre-existing value on the parent. IE if you return
`%{author: %User{...}}` from a blog post the helper will by default simply use
the pre-existing author. Set it to false if you always want it to load it fresh.
Ultimately, this helper calls `Dataloader.load/4`
using the loader in your context, the source you provide, the tuple `{resource, args}`
as the batch key, and then the parent value of the field
```
def dataloader(source_name, resource) do
fn parent, args, %{context: %{loader: loader}} ->
args = Map.merge(opts[:args] || %{}, args)
loader
|> Dataloader.load(source_name, {resource, args}, parent)
|> on_load(fn loader ->
{:ok, Dataloader.get(loader, source_name, {resource, args}, parent)}
end)
end
```
"""
def dataloader(source, fun, opts \\ [])
@spec dataloader(Dataloader.source_name(), dataloader_key_fun | any, [dataloader_opt]) ::
dataloader_tuple
def dataloader(source, fun, opts) when is_function(fun, 3) do
fn parent, args, %{context: %{loader: loader}} = res ->
{resource, args} = fun.(parent, args, res)
do_dataloader(loader, source, resource, args, parent, opts)
end
end
def dataloader(source, resource, opts) do
fn parent, args, %{context: %{loader: loader}} ->
do_dataloader(loader, source, resource, args, parent, opts)
end
end
defp use_parent(loader, source, resource, parent, args, opts) do
with true <- Keyword.get(opts, :use_parent, false),
{:ok, val} <- is_map(parent) && Map.fetch(parent, resource) do
Dataloader.put(loader, source, {resource, args}, parent, val)
else
_ -> loader
end
end
defp do_dataloader(loader, source, resource, args, parent, opts) do
args =
opts
|> Keyword.get(:args, %{})
|> Map.merge(args)
loader
|> use_parent(source, resource, parent, args, opts)
|> Dataloader.load(source, {resource, args}, parent)
|> on_load(fn loader ->
callback = Keyword.get(opts, :callback, &default_callback/3)
loader
|> Dataloader.get(source, {resource, args}, parent)
|> callback.(parent, args)
end)
end
defp default_callback(result, _parent, _args), do: {:ok, result}
end
end
defmodule Absinthe.Resolution.Projector do
  @moduledoc false

  alias Absinthe.{Blueprint, Type}

  @doc """
  Project one layer down from where we are right now.

  Projection amounts to collecting the next set of fields to operate on, based on
  the current field. This is a non trivial operation because you have to handle
  the various type conditions that come along with fragments / inline fragments,
  field merging, and other wondeful stuff like that.
  """
  def project(selections, %{identifier: identifier} = parent_type, path, cache, exec) do
    # Cache key: parent type identifier plus the response-key path so far, so an
    # identical sub-selection is only projected once per execution.
    path_names = for %{name: name, alias: alias} <- path, name, do: alias || name
    key = {identifier, path_names}

    case Map.fetch(cache, key) do
      {:ok, fields} ->
        {fields, cache}

      _ ->
        fields =
          selections
          |> collect(parent_type, exec)
          |> rectify_order

        {fields, Map.put(cache, key, fields)}
    end
  end

  # The key under which a field appears in the response: its alias if set,
  # otherwise its name.
  defp response_key(%{alias: nil, name: name}), do: name
  defp response_key(%{alias: alias}), do: alias
  defp response_key(%{name: name}), do: name

  defp collect(selections, parent_type, %{fragments: fragments, schema: schema}) do
    {acc, _index} = do_collect(selections, fragments, parent_type, schema, 0, %{})
    acc
  end

  # Walk the selections, grouping fields by response key into `acc`. Each group
  # remembers the index at which it was first seen so rectify_order/1 can
  # restore document order after grouping. Fragments contribute their inner
  # selections when their type condition matches the parent type.
  defp do_collect([], _, _, _, index, acc), do: {acc, index}

  defp do_collect([selection | selections], fragments, parent_type, schema, index, acc) do
    case selection do
      %{flags: %{skip: _}} ->
        # Selection was flagged as skipped (directives); ignore it entirely.
        do_collect(selections, fragments, parent_type, schema, index, acc)

      %Blueprint.Document.Field{} = field ->
        field = update_schema_node(field, parent_type)
        key = response_key(field)

        acc =
          Map.update(acc, key, {index, [field]}, fn {existing_index, fields} ->
            # Merged field: keep the first-seen index, prepend the new field.
            {existing_index, [field | fields]}
          end)

        do_collect(selections, fragments, parent_type, schema, index + 1, acc)

      %Blueprint.Document.Fragment.Inline{
        type_condition: %{schema_node: condition},
        selections: inner_selections
      } ->
        {acc, index} =
          conditionally_collect(
            condition,
            inner_selections,
            fragments,
            parent_type,
            schema,
            index,
            acc
          )

        do_collect(selections, fragments, parent_type, schema, index, acc)

      %Blueprint.Document.Fragment.Spread{name: name} ->
        # Named spread: look the fragment up by name; it must exist.
        %{type_condition: condition, selections: inner_selections} = Map.fetch!(fragments, name)

        {acc, index} =
          conditionally_collect(
            condition,
            inner_selections,
            fragments,
            parent_type,
            schema,
            index,
            acc
          )

        do_collect(selections, fragments, parent_type, schema, index, acc)
    end
  end

  # Turn the grouped map back into a list ordered by first appearance in the
  # document, merging the selection sets of fields that share a response key.
  defp rectify_order(grouped_fields) do
    grouped_fields
    |> Enum.sort(fn {_, {i1, _}}, {_, {i2, _}} ->
      i1 <= i2
    end)
    |> Enum.map(fn
      {_k, {_index, [field]}} ->
        field

      {_k, {_index, [%{selections: selections} = field | rest]}} ->
        %{field | selections: flatten(rest, selections)}
    end)
  end

  # Concatenate the selections of the remaining merged fields onto `acc`.
  defp flatten([], acc), do: acc

  defp flatten([%{selections: selections} | fields], acc) do
    flatten(fields, selections ++ acc)
  end

  # Only collect a fragment's selections when its (unwrapped, normalized) type
  # condition passes against the concrete parent type.
  defp conditionally_collect(condition, selections, fragments, parent_type, schema, index, acc) do
    condition
    |> Type.unwrap()
    |> normalize_condition(schema)
    |> passes_type_condition?(parent_type)
    |> case do
      true -> do_collect(selections, fragments, parent_type, schema, index, acc)
      false -> {acc, index}
    end
  end

  # necessary when the field in question is on an abstract type.
  defp update_schema_node(%{name: "__" <> _} = field, _) do
    # Introspection fields (`__typename`, ...) keep their schema node untouched.
    field
  end

  defp update_schema_node(%{schema_node: %{identifier: identifier}} = field, %{
         fields: concrete_fields
       }) do
    # Re-point the field at the definition on the concrete object type.
    %{field | schema_node: :maps.get(identifier, concrete_fields)}
  end

  # Reduce whatever form the condition takes (blueprint node, type struct, or
  # type reference) down to an actual schema type.
  defp normalize_condition(%{schema_node: condition}, schema) do
    normalize_condition(condition, schema)
  end

  defp normalize_condition(%{} = condition, _schema) do
    condition
  end

  defp normalize_condition(condition, schema) do
    Absinthe.Schema.lookup_type(schema, condition)
  end

  # A condition passes for the identical object type, or when the object is a
  # member of the interface/union named by the condition.
  defp passes_type_condition?(%Type.Object{name: name}, %Type.Object{name: name}) do
    true
  end

  defp passes_type_condition?(%Type.Interface{} = condition, %Type.Object{} = type) do
    Type.Interface.member?(condition, type)
  end

  defp passes_type_condition?(%Type.Union{} = condition, %Type.Object{} = type) do
    Type.Union.member?(condition, type)
  end

  defp passes_type_condition?(_, _) do
    false
  end
end
defprotocol Absinthe.Traversal.Node do
  @moduledoc false

  # Protocol used by the traversal machinery to walk schema structures.
  # Unknown node types fall back to the Any implementation (no children).
  @fallback_to_any true

  # Returns the child nodes of `node` in the context of `traversal`.
  @spec children(any, Absinthe.Traversal.t()) :: [any]
  def children(node, traversal)
end
defimpl Absinthe.Traversal.Node, for: Any do
  # Fallback implementation: arbitrary nodes contribute no children.
  def children(_node, _traversal) do
    []
  end
end
defimpl Absinthe.Traversal.Node, for: Atom do
  # An atom node is either the schema module itself (the traversal root) or a
  # type reference to be resolved against the schema.

  # Root schema node: the repeated binding only matches when the node IS the
  # schema held in the traversal context. Children are the defined root
  # operation types.
  def children(schema, %{context: schema}) do
    Enum.reject([schema.query, schema.mutation, schema.subscription], &is_nil/1)
  end

  # Type reference: resolve it; unknown references contribute no children.
  def children(node, %{context: schema}) do
    case Absinthe.Schema.lookup_type(schema, node) do
      nil -> []
      type -> [type]
    end
  end

  def children(_node, _traversal) do
    []
  end
end
defmodule Absinthe.Schema do
  @moduledoc """
  Define a GraphQL schema.

  `use Absinthe.Schema` in a module and declare the root `query/2`,
  `mutation/2`, and/or `subscription/2` objects. After compilation the schema
  is verified by running the schema pipeline (see `__after_compile__/2`); any
  phase errors raise `Absinthe.Schema.CompilationError`.
  """

  alias Absinthe.Type
  alias __MODULE__

  # A schema is simply a module that has used Absinthe.Schema.
  @type t :: module

  defmodule CompilationError do
    @moduledoc """
    Raised when a schema fails compile-time verification.
    """
    defexception phase_errors: []

    # Render each phase error as its own bulleted line.
    def message(error) do
      details = Enum.map_join(error.phase_errors, "\n", &"- #{&1.message}")
      "Compilation failed:\n" <> details
    end
  end

  defmacro __using__(_opt) do
    quote do
      use Absinthe.Schema.Notation
      import unquote(__MODULE__), only: :macros
      @after_compile unquote(__MODULE__)

      # Compiled type/directive lookups are delegated to the generated
      # `Compiled` submodule.
      defdelegate __absinthe_type__(name), to: __MODULE__.Compiled
      defdelegate __absinthe_directive__(name), to: __MODULE__.Compiled
      defdelegate __absinthe_types__(), to: __MODULE__.Compiled
      defdelegate __absinthe_directives__(), to: __MODULE__.Compiled
      defdelegate __absinthe_interface_implementors__(), to: __MODULE__.Compiled

      def __absinthe_lookup__(name) do
        __absinthe_type__(name)
      end

      @doc false
      def middleware(middleware, _field, _object) do
        middleware
      end

      @doc false
      def plugins do
        Absinthe.Plugin.defaults()
      end

      @doc false
      def context(context) do
        context
      end

      @doc false
      def decorations(_node, _ancestors) do
        []
      end

      defoverridable(context: 1, middleware: 3, plugins: 0, decorations: 2)
    end
  end

  @object_type Absinthe.Blueprint.Schema.ObjectTypeDefinition

  @default_query_name "RootQueryType"

  @doc """
  Defines a root Query object
  """
  defmacro query(raw_attrs \\ [name: @default_query_name], do: block) do
    record_query(__CALLER__, raw_attrs, block)
  end

  # Record the root query object definition, defaulting its name.
  defp record_query(env, raw_attrs, block) do
    attrs =
      raw_attrs
      |> Keyword.put_new(:name, @default_query_name)

    Absinthe.Schema.Notation.record!(env, @object_type, :query, attrs, block)
  end

  @default_mutation_name "RootMutationType"

  @doc """
  Defines a root Mutation object

  ```
  mutation do
    field :create_user, :user do
      arg :name, non_null(:string)
      arg :email, non_null(:string)

      resolve &MyApp.Web.BlogResolvers.create_user/2
    end
  end
  ```
  """
  defmacro mutation(raw_attrs \\ [name: @default_mutation_name], do: block) do
    record_mutation(__CALLER__, raw_attrs, block)
  end

  # Record the root mutation object definition, defaulting its name.
  defp record_mutation(env, raw_attrs, block) do
    attrs =
      raw_attrs
      |> Keyword.put_new(:name, @default_mutation_name)

    Absinthe.Schema.Notation.record!(env, @object_type, :mutation, attrs, block)
  end

  @default_subscription_name "RootSubscriptionType"

  @doc """
  Defines a root Subscription object

  Subscriptions in GraphQL let a client submit a document to the server that
  outlines what data they want to receive in the event of particular updates.

  For a full walk through of how to setup your project with subscriptions and
  Phoenix see the Absinthe.Phoenix project moduledoc.

  When you push a mutation, you can have selections on that mutation result
  to get back data you need, IE

  ```
  mutation {
    createUser(accountId: 1, name: "bob") {
      id
      account { name }
    }
  }
  ```

  However, what if you want to know when OTHER people create a new user, so that
  your UI can update as well. This is the point of subscriptions.

  ```
  subscription {
    newUsers {
      id
      account { name }
    }
  }
  ```

  The job of the subscription macros then is to give you the tools to connect
  subscription documents with the values that will drive them. In the last example
  we would get all users for all accounts, but you could imagine wanting just
  `newUsers(accountId: 2)`.

  In your schema you articulate the interests of a subscription via the `config`
  macro:

  ```
  subscription do
    field :new_users, :user do
      arg :account_id, non_null(:id)

      config fn args,_info ->
        {:ok, topic: args.account_id}
      end
    end
  end
  ```

  The topic can be any term. You can broadcast a value manually to this subscription
  by doing

  ```
  Absinthe.Subscription.publish(pubsub, user, [new_users: user.account_id])
  ```

  It's pretty common to want to associate particular mutations as the triggers
  for one or more subscriptions, so Absinthe provides some macros to help with
  that too.

  ```
  subscription do
    field :new_users, :user do
      arg :account_id, non_null(:id)

      config fn args, _info ->
        {:ok, topic: args.account_id}
      end

      trigger :create_user, topic: fn user ->
        user.account_id
      end
    end
  end
  ```

  The idea with a trigger is that it takes either a single mutation `:create_user`
  or a list of mutations `[:create_user, :blah_user, ...]` and a topic function.
  This function returns a value that is used to lookup documents on the basis of
  the topic they returned from the `config` macro.

  Note that a subscription field can have `trigger` as many trigger blocks as you
  need, in the event that different groups of mutations return different results
  that require different topic functions.
  """
  defmacro subscription(raw_attrs \\ [name: @default_subscription_name], do: block) do
    record_subscription(__CALLER__, raw_attrs, block)
  end

  # Record the root subscription object definition, defaulting its name.
  defp record_subscription(env, raw_attrs, block) do
    attrs =
      raw_attrs
      |> Keyword.put_new(:name, @default_subscription_name)

    Absinthe.Schema.Notation.record!(env, @object_type, :subscription, attrs, block)
  end

  @doc false
  def __after_compile__(env, _) do
    # Verify the schema at compile time by running the schema pipeline; raise
    # with all collected phase errors on failure.
    env.module.__absinthe_blueprint__
    |> Absinthe.Pipeline.run(Absinthe.Pipeline.for_schema(env.module))
    |> case do
      {:ok, _, _} ->
        []

      {:error, errors, _} ->
        raise CompilationError, phase_errors: List.wrap(errors)
    end
  end

  ### Helpers

  @doc """
  Run the introspection query on a schema.

  Convenience function.
  """
  @spec introspect(schema :: t, opts :: Absinthe.run_opts()) :: Absinthe.run_result()
  def introspect(schema, opts \\ []) do
    [:code.priv_dir(:absinthe), "graphql", "introspection.graphql"]
    |> Path.join()
    |> File.read!()
    |> Absinthe.run(schema, opts)
  end

  @doc """
  Replace the default middleware

  ## Examples

  Replace the default for all fields with a string lookup instead of an atom lookup:

  ```
  def middleware(middleware, field, object) do
    new_middleware = {Absinthe.Middleware.MapGet, to_string(field.identifier)}
    middleware
    |> Absinthe.Schema.replace_default(new_middleware, field, object)
  end
  ```
  """
  def replace_default(middleware_list, new_middleware, %{identifier: identifier}, _object) do
    Enum.map(middleware_list, fn middleware ->
      case middleware do
        # Only the default MapGet lookup for this exact field is replaced;
        # anything else (custom resolvers etc.) is left intact.
        {Absinthe.Middleware.MapGet, ^identifier} ->
          new_middleware

        middleware ->
          middleware
      end
    end)
  end

  @doc """
  Find a directive on a schema by name.
  """
  def lookup_directive(schema, name) do
    schema.__absinthe_directive__(name)
  end

  @doc """
  Find a type on a schema.

  Accepts a type identifier (atom), a type name (binary), or a (possibly
  wrapped) type struct. Pass `unwrap: false` to return wrapped types as-is.
  """
  def lookup_type(schema, type, options \\ [unwrap: true]) do
    cond do
      is_atom(type) ->
        schema.__absinthe_lookup__(type)

      is_binary(type) ->
        schema.__absinthe_lookup__(type)

      Type.wrapped?(type) ->
        if Keyword.get(options, :unwrap) do
          lookup_type(schema, type |> Type.unwrap())
        else
          type
        end

      true ->
        type
    end
  end

  @doc """
  Get all concrete types for union, interface, or object
  """
  @spec concrete_types(t, Type.t()) :: [Type.t()]
  def concrete_types(schema, %Type.Union{} = type) do
    Enum.map(type.types, &lookup_type(schema, &1))
  end

  def concrete_types(schema, %Type.Interface{} = type) do
    implementors(schema, type)
  end

  def concrete_types(_, %Type.Object{} = type) do
    [type]
  end

  def concrete_types(_, type) do
    [type]
  end

  @doc """
  Get all types that are used by an operation
  """
  @spec used_types(t) :: [Type.t()]
  def used_types(schema) do
    [:query, :mutation, :subscription]
    |> Enum.map(&lookup_type(schema, &1))
    |> Enum.concat(directives(schema))
    |> Enum.reject(&is_nil/1)
    |> Enum.flat_map(&Type.referenced_types(&1, schema))
    |> MapSet.new()
    |> Enum.map(&Schema.lookup_type(schema, &1))
  end

  @doc """
  List all directives on a schema
  """
  @spec directives(t) :: [Type.Directive.t()]
  def directives(schema) do
    schema.__absinthe_directives__
    |> Map.keys()
    |> Enum.map(&lookup_directive(schema, &1))
  end

  @doc """
  List all implementors of an interface on a schema
  """
  @spec implementors(t, Type.identifier_t() | Type.Interface.t()) :: [Type.Object.t()]
  def implementors(schema, ident) when is_atom(ident) do
    schema.__absinthe_interface_implementors__
    |> Map.get(ident, [])
    |> Enum.map(&lookup_type(schema, &1))
  end

  def implementors(schema, %Type.Interface{} = iface) do
    implementors(schema, iface.__reference__.identifier)
  end

  @doc """
  List all types on a schema
  """
  @spec types(t) :: [Type.t()]
  def types(schema) do
    schema.__absinthe_types__
    |> Map.keys()
    |> Enum.map(&lookup_type(schema, &1))
  end

  @doc """
  Get all introspection types
  """
  @spec introspection_types(t) :: [Type.t()]
  def introspection_types(schema) do
    schema
    |> Schema.types()
    |> Enum.filter(&Type.introspection?/1)
  end
end
defmodule Absinthe.Middleware do
  @moduledoc """
  Middleware enables custom resolution behaviour on a field.

  All resolution happens through middleware. Even `resolve` functions are
  middleware, as the `resolve` macro is just

  ```
  quote do
    middleware Absinthe.Resolution, unquote(function_ast)
  end
  ```

  Resolution happens by reducing a list of middleware spec onto an
  `%Absinthe.Resolution{}` struct.

  ## Example

  ```
  defmodule MyApp.Web.Authentication do
    @behaviour Absinthe.Middleware

    def call(resolution, _config) do
      case resolution.context do
        %{current_user: _} ->
          resolution

        _ ->
          resolution
          |> Absinthe.Resolution.put_result({:error, "unauthenticated"})
      end
    end
  end
  ```

  By specifying `@behaviour Absinthe.Middleware` the compiler will ensure that
  we provide a `def call` callback. This function takes an
  `%Absinthe.Resolution{}` struct and will also need to return one such struct.

  On that struct there is a `context` key which holds the absinthe context. This
  is generally where things like the current user are placed. For more
  information on how the current user ends up in the context please see our full
  authentication guide on the website.

  Our `call/2` function simply checks the context to see if there is a current
  user. If there is, we pass the resolution onward. If there is not, we update
  the resolution state to `:resolved` and place an error result.

  Middleware can be placed on a field in three different ways:

  1. Using the `Absinthe.Schema.Notation.middleware/2`
  macro used inside a field definition
  2. Using the `middleware/3` callback in your schema.
  3. Returning a `{:middleware, middleware_spec, config}`
  tuple from a resolution function.

  ## The `middleware/2` macro

  For placing middleware on a particular field, it's handy to use
  the `middleware/2` macro.

  Middleware will be run in the order in which they are specified.
  The `middleware/3` callback has final say on what middleware get
  set.

  Examples

  `MyApp.Web.Authentication` would run before resolution, and `HandleError` would run after.

  ```
  field :hello, :string do
    middleware MyApp.Web.Authentication
    resolve &get_the_string/2
    middleware HandleError, :foo
  end
  ```

  Anonymous functions are a valid middleware spec. A nice use case
  is altering the context in a logout mutation. Mutations are the
  only time the context should be altered. This is not enforced.

  ```
  field :logout, :query do
    middleware fn res, _ ->
      %{res |
        context: Map.delete(res.context, :current_user),
        value: "logged out",
        state: :resolved
      }
    end
  end
  ```

  `middleware/2` even accepts local public function names. Note
  that `middleware/2` is the only thing that can take local function
  names without an associated module. If not using macros, use
  `{{__MODULE__, :function_name}, []}`

  ```
  def auth(res, _config) do
    # auth logic here
  end

  query do
    field :hello, :string do
      middleware :auth
      resolve &get_the_string/2
    end
  end
  ```

  ## The `middleware/3` callback.

  `middleware/3` is a function callback on a schema. When you `use
  Absinthe.Schema` a default implementation of this function is placed in your
  schema. It is passed the existing middleware for a field, the field itself,
  and the object that the field is a part of.

  So for example if your schema contained:

  ```
  object :user do
    field :name, :string
    field :age, :integer
  end

  query do
    field :lookup_user, :user do
      resolve fn _, _ ->
        {:ok, %{name: "Bob"}}
      end
    end
  end

  def middleware(middleware, field, object) do
    middleware |> IO.inspect
    field |> IO.inspect
    object |> IO.inspect

    middleware
  end
  ```

  Given a document like:

  ```
  { lookupUser { name }}
  ```

  `object` is each object that is accessed while executing the document. In our
  case that is the `:user` object and the `:query` object. `field` is every
  field on that object, and middleware is a list of whatever middleware
  spec have been configured by the schema on that field. Concretely
  then, the function will be called , with the following arguments:

  ```
  YourSchema.middleware([{Absinthe.Resolution, #Function<20.52032458/0>}], lookup_user_field_of_root_query_object, root_query_object)
  YourSchema.middleware([{Absinthe.Middleware.Map.Get, :name}], name_field_of_user, user_object)
  YourSchema.middleware([{Absinthe.Middleware.Map.Get, :age}], age_field_of_user, user_object)
  ```

  In the latter two cases we see that the middleware list is empty. In the first
  case we see one middleware spec, which is placed by the `resolve` macro used in the
  `:lookup_user` field.

  ### Default Middleware

  One use of `middleware/3` is setting the default middleware on a field

  By default middleware is placed on a
  field that looks up a field by its snake case identifier, ie `:resource_name`

  Here is an example of how to change the default to use a camel cased string,
  IE, "resourceName".

  ```
  def middleware(middleware, %{identifier: identifier} = field, object) do
    camelized =
      identifier
      |> Atom.to_string
      |> Macro.camelize

    new_middleware_spec = {{__MODULE__, :get_camelized_key}, camelized}

    Absinthe.Schema.replace_default(middleware, new_middleware_spec, field, object)
  end

  def get_camelized_key(%{source: source} = res, key) do
    %{res | state: :resolved, value: Map.get(source, key)}
  end
  ```

  There's a lot going on here so let's unpack it. We need to define a
  specification to tell Absinthe what middleware to run. The form we're using is
  `{{MODULE, :function_to_call}, options_of_middleware}`. For our purposes we're
  simply going to use a function in the schema module itself
  `get_camelized_key`.

  We then use the `Absinthe.Schema.replace_default/4` function to swap out the
  default middleware already present in the middleware list with the new one we
  want to use. It handles going through the existing list of middleware and
  seeing if it's using the default or if it has custom resolvers on it. If it's
  using the default, the function applies our newly defined middleware spec.

  Like all middleware functions, `:get_camelized_key` takes a resolution struct,
  and options. The options is the camelized key we generated. We get the
  camelized string from the parent map, and set it as the value of the
  resolution struct. Finally we mark the resolution state `:resolved`.

  Side note: This `middleware/3` function is called whenever we pull the type
  out of the schema. The middleware itself is run every time we get a field on
  an object. If we have 1000 objects and we were doing the camelization logic
  INSIDE the middleware, we would compute the camelized string 1000 times. By
  doing it in the `def middleware` callback we do it just once.

  ### Changes Since 1.3

  In Absinthe 1.3, fields without any `middleware/2` or `resolve/1` calls would
  show up with an empty list `[]` as its middleware in the `middleware/3`
  function. If no middleware was applied in the function and it also returned `[]`,
  THEN Absinthe would apply the default.

  This made it very easy to accidently break your schema if you weren't
  particularly careful with your pattern matching. Now the defaults are applied
  FIRST by absinthe, and THEN passed to `middleware/3`. Consequently, the
  middleware list argument should always have at least one value. This is also
  why there is now the `replace_default/4` function, because it handles telling
  the difference between a field with a resolver and a field with the default.

  ### Object Wide Authentication

  Let's use our authentication middleware from earlier, and place it on every
  field in the query object.

  ```
  defmodule MyApp.Web.Schema do
    use Absinthe.Schema

    query do
      field :private_field, :string do
        resolve fn _, _ ->
          {:ok, "this can only be viewed if authenticated"}
        end
      end
    end

    def middleware(middleware, _field, %Absinthe.Type.Object{identifier: identifier})
        when identifier in [:query, :subscription, :mutation] do
      [MyApp.Web.Authentication | middleware]
    end

    def middleware(middleware, _field, _object) do
      middleware
    end
  end
  ```

  It is important to note that we are matching for the `:query`, `:subscription`
  or `:mutation` identifier types. We do this because the middleware function
  will be called for each field in the schema. If we didn't limit it to those
  types, we would be applying authentication to every field in the entire
  schema, even stuff like `:name` or `:age`. This generally isn't necessary
  provided you authenticate at the entrypoints.

  ## Main Points

  - Middleware functions take a `%Absinthe.Resolution{}` struct, and return one.
  - All middleware on a field are always run, make sure to pattern match on the
  state if you care.
  """

  @type function_name :: atom

  @type spec ::
          module
          | {module, term}
          | {{module, function_name}, term}
          | (Absinthe.Resolution.t(), term -> Absinthe.Resolution.t())

  @doc """
  This is the main middleware callback.

  It receives an `%Absinthe.Resolution{}` struct and it needs to return an
  `%Absinthe.Resolution{}` struct. The second argument will be whatever value
  was passed to the `middleware` call that setup the middleware.
  """
  @callback call(Absinthe.Resolution.t(), term) :: Absinthe.Resolution.t()

  @doc false
  # Late-bound middleware reference: look the field's middleware back up in the
  # compiled schema module, expand it, and prepend it to the resolution's
  # remaining middleware queue.
  def shim(res, {:ref, module, {_namespace, {object, field}} = ref}) do
    schema = res.schema
    object = Absinthe.Schema.lookup_type(schema, object)
    field = Map.fetch!(object.fields, field)
    middleware = module.__absinthe_function__(ref, :middleware)
    middleware = expand(schema, middleware, field, object)
    %{res | middleware: middleware ++ res.middleware}
  end

  @doc false
  # Ensure defaults are applied, then give the schema's `middleware/3` callback
  # the final say over the middleware list for this field.
  def expand(schema, middleware, field, object) do
    middleware =
      Absinthe.Schema.Notation.__ensure_middleware__(
        middleware,
        field,
        object
      )

    schema.middleware(middleware, field, object)
  end
end
defmodule Absinthe.Utils do
  @moduledoc """
  Assorted utility functions used throughout Absinthe.
  """

  @doc """
  Camelize a word, respecting underscore prefixes.

  ## Examples

  With an uppercase first letter:

  ```
  iex> camelize("foo_bar")
  "FooBar"
  iex> camelize("foo")
  "Foo"
  iex> camelize("__foo_bar")
  "__FooBar"
  iex> camelize("__foo")
  "__Foo"
  iex> camelize("_foo")
  "_Foo"
  ```

  With a lowercase first letter:

  ```
  iex> camelize("foo_bar", lower: true)
  "fooBar"
  iex> camelize("foo", lower: true)
  "foo"
  iex> camelize("__foo_bar", lower: true)
  "__fooBar"
  iex> camelize("__foo", lower: true)
  "__foo"
  iex> camelize("_foo", lower: true)
  "_foo"
  ```
  """
  @spec camelize(binary, Keyword.t()) :: binary
  def camelize(word, opts \\ [])

  # Preserve each leading underscore; camelize only the remainder.
  def camelize("_" <> word, opts) do
    "_" <> camelize(word, opts)
  end

  def camelize(word, opts) do
    case opts |> Enum.into(%{}) do
      %{lower: true} ->
        # Lowercase only the first grapheme of the camelized word.
        {first, rest} = String.split_at(Macro.camelize(word), 1)
        String.downcase(first) <> rest

      _ ->
        Macro.camelize(word)
    end
  end

  @doc false
  @spec escapable?(any()) :: boolean()
  def escapable?(value) do
    # if this doesn't blow up, the value can be escaped
    _ = Macro.escape(value)
    true
  rescue
    # Deliberately broad: any failure to escape means "not escapable".
    _ ->
      false
  end

  @doc false
  # Render the placement documentation for the first rule in a placement list.
  def placement_docs([{_, placement} | _]) do
    placement
    |> do_placement_docs
  end

  defp do_placement_docs(toplevel: true) do
    """
    Top level in module.
    """
  end

  defp do_placement_docs(toplevel: false) do
    """
    Allowed under any block. Not allowed to be top level
    """
  end

  defp do_placement_docs(under: under) when is_list(under) do
    under = Enum.map_join(Enum.sort(under), " ", &"`#{&1}`")

    """
    Allowed under: #{under}
    """
  end

  defp do_placement_docs(under: under) do
    do_placement_docs(under: [under])
  end

  @doc false
  # Build a markdown description of a built-in type/directive module, listing
  # each type and directive with its identifier and description.
  def describe_builtin_module(module) do
    title =
      module
      |> Module.split()
      |> List.last()

    types =
      module.__absinthe_types__
      |> Map.keys()
      |> Enum.sort()
      |> Enum.map(fn identifier ->
        type = module.__absinthe_type__(identifier)

        """
        ## #{type.name}
        Identifier: `#{inspect(identifier)}`

        #{type.description}
        """
      end)

    directives =
      module.__absinthe_directives__
      |> Map.keys()
      |> Enum.sort()
      |> Enum.map(fn identifier ->
        directive = module.__absinthe_directive__(identifier)

        """
        ## #{directive.name}
        Identifier: `#{inspect(identifier)}`

        #{directive.description}
        """
      end)

    """
    # #{title}

    #{types ++ directives}
    """
  end
end
defmodule Absinthe.Language do
  @moduledoc false

  # Type unions over the AST node structs produced by parsing a GraphQL
  # document, plus a small helper for unwrapping non-null type references.

  alias Absinthe.Language
  alias __MODULE__

  # Any AST node.
  @type t ::
          Language.Argument.t()
          | Language.BooleanValue.t()
          | Language.Directive.t()
          | Language.Document.t()
          | Language.EnumTypeDefinition.t()
          | Language.EnumValue.t()
          | Language.Field.t()
          | Language.FieldDefinition.t()
          | Language.FloatValue.t()
          | Language.Fragment.t()
          | Language.FragmentSpread.t()
          | Language.InlineFragment.t()
          | Language.InputObjectTypeDefinition.t()
          | Language.InputValueDefinition.t()
          | Language.IntValue.t()
          | Language.InterfaceTypeDefinition.t()
          | Language.ListType.t()
          | Language.ListValue.t()
          | Language.NamedType.t()
          | Language.NonNullType.t()
          | Language.ObjectField.t()
          | Language.ObjectTypeDefinition.t()
          | Language.ObjectValue.t()
          | Language.OperationDefinition.t()
          | Language.ScalarTypeDefinition.t()
          | Language.SelectionSet.t()
          | Language.Source.t()
          | Language.StringValue.t()
          | Language.TypeExtensionDefinition.t()
          | Language.UnionTypeDefinition.t()
          | Language.Variable.t()
          | Language.VariableDefinition.t()

  # Value nodes
  @type value_t ::
          Language.Variable.t()
          | Language.IntValue.t()
          | Language.FloatValue.t()
          | Language.StringValue.t()
          | Language.BooleanValue.t()
          | Language.EnumValue.t()
          | Language.ListValue.t()
          | Language.ObjectValue.t()

  # Type reference nodes
  @type type_reference_t ::
          Language.NamedType.t() | Language.ListType.t() | Language.NonNullType.t()

  # Type definition nodes
  @type type_definition_t ::
          Language.ObjectTypeDefinition.t()
          | Language.InterfaceTypeDefinition.t()
          | Language.UnionTypeDefinition.t()
          | Language.ScalarTypeDefinition.t()
          | Language.EnumTypeDefinition.t()
          | Language.InputObjectTypeDefinition.t()
          | Language.TypeExtensionDefinition.t()

  # Source location: line/column within the parsed document.
  @type loc_t :: %{line: pos_integer, column: pos_integer}

  # Input (literal or variable) node modules.
  @type input_t ::
          Language.BooleanValue
          | Language.EnumValue
          | Language.FloatValue
          | Language.IntValue
          | Language.ListValue
          | Language.ObjectValue
          | Language.StringValue
          | Language.Variable

  # Unwrap an AST type from a NonNullType
  @doc false
  @spec unwrap(Language.NonNullType.t() | t) :: t
  def unwrap(%Language.NonNullType{type: t}), do: t
  def unwrap(type), do: type
end
defmodule Absinthe.Pipeline do
@moduledoc """
Execute a pipeline of phases.
A pipeline is merely a list of phases. This module contains functions for building,
modifying, and executing pipelines of phases.
"""
alias Absinthe.Phase
require Logger
@type data_t :: any
@type phase_config_t :: Phase.t() | {Phase.t(), Keyword.t()}
@type t :: [phase_config_t | [phase_config_t]]
@spec run(data_t, t) :: {:ok, data_t, [Phase.t()]} | {:error, String.t(), [Phase.t()]}
def run(input, pipeline) do
  # A pipeline may contain nested phase lists; flatten before reducing the
  # input through every phase in order.
  run_phase(List.flatten(pipeline), input)
end
# Default execution options used by `options/1`; caller-supplied overrides
# take precedence on conflicting keys.
@defaults [
  adapter: Absinthe.Adapter.LanguageConventions,
  operation_name: nil,
  variables: %{},
  context: %{},
  root_value: %{},
  validation_result_phase: Phase.Document.Validation.Result,
  result_phase: Phase.Document.Result,
  jump_phases: true
]

@doc """
Return the default pipeline options merged with `overrides`.
"""
def options(overrides \\ []) do
  Keyword.merge(@defaults, overrides)
end
@spec for_document(Absinthe.Schema.t()) :: t
@spec for_document(Absinthe.Schema.t(), Keyword.t()) :: t
def for_document(schema, options \\ []) do
options = options(Keyword.put(options, :schema, schema))
[
# Parse Document
{Phase.Parse, options},
# Convert to Blueprint
{Phase.Blueprint, options},
# Find Current Operation (if any)
{Phase.Document.Validation.ProvidedAnOperation, options},
{Phase.Document.CurrentOperation, options},
# Mark Fragment/Variable Usage
Phase.Document.Uses,
# Validate Document Structure
{Phase.Document.Validation.NoFragmentCycles, options},
Phase.Document.Validation.LoneAnonymousOperation,
Phase.Document.Validation.SelectedCurrentOperation,
Phase.Document.Validation.KnownFragmentNames,
Phase.Document.Validation.NoUndefinedVariables,
Phase.Document.Validation.NoUnusedVariables,
# TODO: uncomment in 1.5
# Phase.Document.Validation.NoUnusedFragments
Phase.Document.Validation.UniqueFragmentNames,
Phase.Document.Validation.UniqueOperationNames,
Phase.Document.Validation.UniqueVariableNames,
# Apply Input
{Phase.Document.Context, options},
{Phase.Document.Variables, options},
Phase.Document.Validation.ProvidedNonNullVariables,
Phase.Document.Arguments.Normalize,
# Map to Schema
{Phase.Schema, options},
# Ensure Types
Phase.Validation.KnownTypeNames,
# Process Arguments
Phase.Document.Arguments.CoerceEnums,
Phase.Document.Arguments.CoerceLists,
{Phase.Document.Arguments.Parse, options},
Phase.Document.MissingVariables,
Phase.Document.MissingLiterals,
Phase.Document.Arguments.FlagInvalid,
# Validate Full Document
Phase.Validation.KnownDirectives,
Phase.Document.Validation.ScalarLeafs,
Phase.Document.Validation.VariablesAreInputTypes,
Phase.Document.Validation.ArgumentsOfCorrectType,
Phase.Document.Validation.KnownArgumentNames,
Phase.Document.Validation.ProvidedNonNullArguments,
Phase.Document.Validation.UniqueArgumentNames,
Phase.Document.Validation.UniqueInputFieldNames,
Phase.Document.Validation.FieldsOnCorrectType,
Phase.Document.Validation.OnlyOneSubscription,
# Check Validation
{Phase.Document.Validation.Result, options},
# Prepare for Execution
Phase.Document.Arguments.Data,
# Apply Directives
Phase.Document.Directives,
# Analyse Complexity
{Phase.Document.Complexity.Analysis, options},
{Phase.Document.Complexity.Result, options},
# Execution
{Phase.Subscription.SubscribeSelf, options},
{Phase.Document.Execution.Resolution, options},
# Format Result
Phase.Document.Result
]
end
@spec for_schema(nil | Absinthe.Schema.t()) :: t
@spec for_schema(nil | Absinthe.Schema.t(), Keyword.t()) :: t
def for_schema(schema, _options \\ []) do
[
Phase.Schema.TypeImports,
Phase.Schema.ValidateTypeReferences,
Phase.Schema.FieldImports,
{Phase.Schema.Decorate, [schema: schema]},
Phase.Validation.KnownTypeNames,
Phase.Schema.RegisterTriggers,
Phase.Schema.Validation.Result,
Phase.Schema.Build,
Phase.Schema.InlineFunctions,
{Phase.Schema.Compile, [module: schema]}
]
end
@doc """
Return the part of a pipeline before a specific phase.
"""
@spec before(t, atom) :: t
def before(pipeline, phase) do
result =
List.flatten(pipeline)
|> Enum.take_while(&(!match_phase?(phase, &1)))
case result do
^pipeline ->
raise RuntimeError, "Could not find phase #{phase}"
_ ->
result
end
end
@doc """
Return the part of a pipeline after (and including) a specific phase.
"""
@spec from(t, atom) :: t
def from(pipeline, phase) do
result =
List.flatten(pipeline)
|> Enum.drop_while(&(!match_phase?(phase, &1)))
case result do
[] ->
raise RuntimeError, "Could not find phase #{phase}"
_ ->
result
end
end
@doc """
Replace a phase in a pipeline with another, supporting reusing the same
options.
## Examples
Replace a simple phase (without options):
iex> Pipeline.replace([A, B, C], B, X)
[A, X, C]
Replace a phase with options, retaining them:
iex> Pipeline.replace([A, {B, [name: "Thing"]}, C], B, X)
[A, {X, [name: "Thing"]}, C]
Replace a phase with options, overriding them:
iex> Pipeline.replace([A, {B, [name: "Thing"]}, C], B, {X, [name: "Nope"]})
[A, {X, [name: "Nope"]}, C]
"""
@spec replace(t, Phase.t(), phase_config_t) :: t
def replace(pipeline, phase, replacement) do
Enum.map(pipeline, fn candidate ->
case match_phase?(phase, candidate) do
true ->
case phase_invocation(candidate) do
{_, []} ->
replacement
{_, opts} ->
case is_atom(replacement) do
true ->
{replacement, opts}
false ->
replacement
end
end
false ->
candidate
end
end)
end
# Whether a phase configuration is for a given phase
@spec match_phase?(Phase.t(), phase_config_t) :: boolean
defp match_phase?(phase, phase), do: true
defp match_phase?(phase, {phase, _}), do: true
defp match_phase?(_, _), do: false
@doc """
Return the part of a pipeline up to and including a specific phase.
"""
@spec upto(t, atom) :: t
def upto(pipeline, phase) do
beginning = before(pipeline, phase)
item = get_in(pipeline, [Access.at(length(beginning))])
beginning ++ [item]
end
@spec without(t, Phase.t()) :: t
def without(pipeline, phase) do
pipeline
|> Enum.filter(&(not match_phase?(phase, &1)))
end
@spec insert_before(t, Phase.t(), phase_config_t | [phase_config_t]) :: t
def insert_before(pipeline, phase, additional) do
beginning = before(pipeline, phase)
beginning ++ List.wrap(additional) ++ (pipeline -- beginning)
end
@spec insert_after(t, Phase.t(), phase_config_t | [phase_config_t]) :: t
def insert_after(pipeline, phase, additional) do
beginning = upto(pipeline, phase)
beginning ++ List.wrap(additional) ++ (pipeline -- beginning)
end
@spec reject(t, Regex.t() | (Module.t() -> boolean)) :: t
def reject(pipeline, %Regex{} = pattern) do
reject(pipeline, fn phase ->
Regex.match?(pattern, Atom.to_string(phase))
end)
end
def reject(pipeline, fun) do
Enum.reject(pipeline, fn
{phase, _} -> fun.(phase)
phase -> fun.(phase)
end)
end
@spec run_phase(t, data_t, [Phase.t()]) ::
{:ok, data_t, [Phase.t()]} | {:error, String.t(), [Phase.t()]}
def run_phase(pipeline, input, done \\ [])
def run_phase([], input, done) do
{:ok, input, done}
end
def run_phase([phase_config | todo], input, done) do
{phase, options} = phase_invocation(phase_config)
case phase.run(input, options) do
{:ok, result} ->
run_phase(todo, result, [phase | done])
{:jump, result, destination_phase} when is_atom(destination_phase) ->
run_phase(from(todo, destination_phase), result, [phase | done])
{:insert, result, extra_pipeline} ->
run_phase(List.wrap(extra_pipeline) ++ todo, result, [phase | done])
{:swap, result, target, replacements} ->
todo
|> replace(target, replacements)
|> run_phase(result, [phase | done])
{:replace, result, final_pipeline} ->
run_phase(List.wrap(final_pipeline), result, [phase | done])
{:error, message} ->
{:error, message, [phase | done]}
_ ->
{:error, "Last phase did not return a valid result tuple.", [phase | done]}
end
end
@spec phase_invocation(phase_config_t) :: {Phase.t(), list}
defp phase_invocation({phase, options}) when is_list(options) do
{phase, options}
end
defp phase_invocation(phase) do
{phase, []}
end
end
defmodule Absinthe.Introspection do
  @moduledoc """
  Introspection support.

  You can introspect your schema using `__schema`, `__type`, and `__typename`,
  as [described in the specification](https://facebook.github.io/graphql/#sec-Introspection).

  ## Examples

  Seeing the names of the types in the schema:

  ```
  \"""
  {
    __schema {
      types {
        name
      }
    }
  }
  \"""
  |> Absinthe.run(MyApp.Schema)
  {:ok,
    %{data: %{
      "__schema" => %{
        "types" => [
          %{"name" => "Boolean"},
          %{"name" => "Float"},
          %{"name" => "ID"},
          %{"name" => "Int"},
          %{"name" => "String"},
          ...
        ]
      }
    }}
  }
  ```

  Getting the name of the queried type:

  ```
  \"""
  {
    profile {
      name
      __typename
    }
  }
  \"""
  |> Absinthe.run(MyApp.Schema)
  {:ok,
    %{data: %{
      "profile" => %{
        "name" => "Joe",
        "__typename" => "Person"
      }
    }}
  }
  ```

  Getting the name of the fields for a named type:

  ```
  \"""
  {
    __type(name: "Person") {
      fields {
        name
        type {
          kind
          name
        }
      }
    }
  }
  \"""
  |> Absinthe.run(MyApp.Schema)
  {:ok,
    %{data: %{
      "__type" => %{
        "fields" => [
          %{
            "name" => "name",
            "type" => %{"kind" => "SCALAR", "name" => "String"}
          },
          %{
            "name" => "age",
            "type" => %{"kind" => "SCALAR", "name" => "Int"}
          },
        ]
      }
    }}
  }
  ```

  (Note that you may have to nest several depths of `type`/`ofType`, as
  type information includes any wrapping layers of [List](https://facebook.github.io/graphql/#sec-List)
  and/or [NonNull](https://facebook.github.io/graphql/#sec-Non-null).)
  """

  alias Absinthe.Type

  # An introspection type is any object type whose name starts with "__".
  @doc false
  @spec type?(any) :: boolean
  def type?(term) do
    case term do
      %Type.Object{name: "__" <> _} -> true
      _ -> false
    end
  end
end
defmodule Absinthe.Blueprint.Result.Leaf do
  @moduledoc false

  # Execution-result node for a scalar/enum (leaf) field.

  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:emitter, :value]
  defstruct emitter: nil,
            value: nil,
            errors: [],
            flags: %{},
            extensions: %{}

  @type t :: %__MODULE__{
          emitter: Blueprint.Document.Field.t(),
          value: Blueprint.Document.Resolution.node_t(),
          errors: [Phase.Error.t()],
          flags: Blueprint.flags_t(),
          extensions: %{any => any}
        }
end
defmodule Absinthe.Blueprint.Result.List do
  @moduledoc false

  # Execution-result node for a list-typed field.

  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:emitter, :values]
  defstruct emitter: nil,
            values: nil,
            errors: [],
            flags: %{},
            extensions: %{}

  @type t :: %__MODULE__{
          emitter: Blueprint.Document.Field.t(),
          values: [Blueprint.Document.Resolution.node_t()],
          errors: [Phase.Error.t()],
          flags: Blueprint.flags_t(),
          extensions: %{any => any}
        }
end
defmodule Absinthe.Blueprint.Result.Object do
  @moduledoc false

  # Execution-result node for an object-typed field (or the root).

  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:emitter, :root_value]
  defstruct root_value: nil,
            emitter: nil,
            fields: nil,
            errors: [],
            flags: %{},
            extensions: %{}

  @type t :: %__MODULE__{
          emitter: Blueprint.Document.Field.t(),
          fields: [Blueprint.Document.Resolution.node_t()],
          errors: [Phase.Error.t()],
          flags: Blueprint.flags_t(),
          extensions: %{any => any}
        }
end
defmodule Absinthe.Blueprint.TypeReference do
  @moduledoc false

  alias __MODULE__

  @type t ::
          TypeReference.List.t()
          | TypeReference.Name.t()
          | TypeReference.Identifier.t()
          | TypeReference.NonNull.t()

  # Wrapper nodes that carry an inner `of_type`.
  @wrappers [TypeReference.List, TypeReference.NonNull]

  @doc """
  Unwrap a type reference from surrounding NonNull/List type information.
  """
  # Fix: a single @spec covers all clauses; the original declared the
  # identical `@spec unwrap(t) :: t` twice, which is redundant.
  @spec unwrap(t) :: t
  def unwrap(%TypeReference.Name{} = value) do
    value
  end

  def unwrap(%TypeReference.Identifier{} = value) do
    value
  end

  def unwrap(%struct{of_type: inner}) when struct in @wrappers do
    unwrap(inner)
  end

  # Convert a blueprint type reference into the corresponding Absinthe type
  # wrapper (or type identifier), resolving names against the schema's
  # `type_definitions`.
  def to_type(%__MODULE__.NonNull{of_type: type}, schema) do
    %Absinthe.Type.NonNull{of_type: to_type(type, schema)}
  end

  def to_type(%__MODULE__.List{of_type: type}, schema) do
    %Absinthe.Type.List{of_type: to_type(type, schema)}
  end

  def to_type(%__MODULE__.Name{name: name}, schema) do
    # NOTE(review): raises if the name is not found in the schema (nil.identifier).
    Enum.find(schema.type_definitions, &(&1.name == name)).identifier
  end

  def to_type(%__MODULE__.Identifier{id: id}, _) when is_atom(id) do
    id
  end

  def to_type(value, _) when is_atom(value) do
    value
  end
end
defmodule Absinthe.Blueprint.Input.Null do
  @moduledoc false

  # Input node for an explicit `null` literal.

  alias Absinthe.{Blueprint, Phase}

  defstruct source_location: nil,
            # Populated by later phases
            flags: %{},
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          flags: Blueprint.flags_t(),
          schema_node: nil | Absinthe.Type.t(),
          source_location: Blueprint.SourceLocation.t(),
          errors: [Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Input.Float do
  @moduledoc false

  # Input node for a float literal.

  alias Absinthe.Blueprint

  @enforce_keys [:value]
  defstruct value: nil,
            source_location: nil,
            # Populated by later phases
            flags: %{},
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          value: float,
          flags: Blueprint.flags_t(),
          source_location: Blueprint.SourceLocation.t(),
          schema_node: nil | Absinthe.Type.t(),
          errors: [Absinthe.Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Input.Boolean do
  @moduledoc false

  # Input node for a boolean literal.

  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:value]
  defstruct value: nil,
            source_location: nil,
            # Populated by later phases
            flags: %{},
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          value: true | false,
          flags: Blueprint.flags_t(),
          schema_node: nil | Absinthe.Type.t(),
          source_location: Blueprint.SourceLocation.t(),
          errors: [Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Input.Variable.Use do
  @moduledoc false

  # Records that a variable with this name is referenced at a location.

  alias Absinthe.Blueprint

  @enforce_keys [:name, :source_location]
  defstruct name: nil,
            source_location: nil

  @type t :: %__MODULE__{
          name: String.t(),
          source_location: nil | Blueprint.SourceLocation.t()
        }
end
defmodule Absinthe.Blueprint.Input.Field do
  @moduledoc false

  # A single field inside an input object literal.

  alias Absinthe.{Blueprint, Type}

  @enforce_keys [:name, :input_value]
  defstruct name: nil,
            input_value: nil,
            # Populated by later phases
            flags: %{},
            source_location: nil,
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          name: String.t(),
          input_value: Blueprint.Input.Value.t(),
          flags: Blueprint.flags_t(),
          schema_node: nil | Type.Field.t(),
          source_location: Blueprint.SourceLocation.t(),
          errors: [Absinthe.Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Input.Argument do
  @moduledoc false

  alias Absinthe.Blueprint

  @enforce_keys [:name, :source_location, :input_value]
  defstruct [
    :name,
    :input_value,
    :source_location,
    # Added by phases
    schema_node: nil,
    # Value converted to native elixir value
    value: nil,
    flags: %{},
    errors: []
  ]

  @type t :: %__MODULE__{
          name: String.t(),
          input_value: Blueprint.Input.Value.t(),
          source_location: Blueprint.SourceLocation.t(),
          schema_node: nil | Absinthe.Type.Argument.t(),
          value: any,
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }

  @doc false
  # Build a map of argument identifier => native value, keeping only arguments
  # known to the schema that either produced a value or are an explicit null.
  @spec value_map([t]) :: %{atom => any}
  def value_map(arguments) do
    arguments
    |> Enum.filter(fn
      # Unknown to the schema: drop.
      %__MODULE__{schema_node: nil} ->
        false

      # Explicit `null` literal: keep even though the native value is nil.
      %__MODULE__{input_value: %{normalized: %Blueprint.Input.Null{}}, value: nil} ->
        true

      # No native value produced: drop.
      %__MODULE__{value: nil} ->
        false

      # Fix: return an explicit boolean; the original returned the argument
      # struct and relied on Enum.filter/2 truthiness.
      _argument ->
        true
    end)
    |> Map.new(&{&1.schema_node.identifier, &1.value})
  end
end
defmodule Absinthe.Blueprint.Input.Generated do
  @moduledoc false

  # A number of phases need to check for `nil` normalized values. This is
  # problematic for situations where a value has been generated from a default
  # value. This struct can be placed on the normalized value to indicate that
  # it is not null, but also that it is not a proper blueprint input.

  @enforce_keys [:by]
  defstruct by: nil
end
defmodule Absinthe.Blueprint.Input.List do
  @moduledoc false

  # Input node for a list literal.

  alias Absinthe.{Blueprint, Phase, Type}

  @enforce_keys [:items]
  defstruct items: nil,
            source_location: nil,
            # Populated by later phases
            flags: %{},
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          items: [Blueprint.Input.Value.t()],
          flags: Blueprint.flags_t(),
          schema_node: nil | Absinthe.Type.t(),
          source_location: Blueprint.SourceLocation.t(),
          errors: [Phase.Error.t()]
        }

  @doc """
  Wrap another input node in a list.
  """
  @spec wrap(Blueprint.Input.t(), Absinthe.Type.List.t()) :: t
  def wrap(%__MODULE__{} = already_a_list, _), do: already_a_list

  def wrap(input_node, list_schema_node) do
    %__MODULE__{
      items: wrapped_items(input_node),
      source_location: input_node.source_location,
      schema_node: list_schema_node
    }
  end

  # Null wraps to an empty item list; anything else becomes a one-item list.
  @spec wrapped_items(Blueprint.Input.t()) :: [] | [Blueprint.Input.Value.t()]
  defp wrapped_items(%Blueprint.Input.Null{}), do: []

  defp wrapped_items(input_node) do
    [
      %Blueprint.Input.Value{
        raw: %Blueprint.Input.RawValue{content: input_node},
        normalized: input_node,
        schema_node: Type.unwrap(input_node.schema_node)
      }
    ]
  end
end
defmodule Absinthe.Blueprint.Input.Object do
  @moduledoc false

  # Input node for an input-object literal.

  alias Absinthe.Blueprint

  @enforce_keys [:fields]
  defstruct source_location: nil,
            fields: [],
            # Populated by later phases
            flags: %{},
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          fields: [Blueprint.Input.Field.t()],
          flags: Blueprint.flags_t(),
          schema_node:
            nil
            | Absinthe.Type.InputObject.t()
            | Absinthe.Type.NonNull.t(Absinthe.Type.InputObject.t()),
          source_location: Blueprint.SourceLocation.t(),
          errors: [Absinthe.Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Input.RawValue do
  @moduledoc false

  # Holds the raw (pre-normalization) input node.
  @enforce_keys [:content]
  defstruct content: nil
end
defmodule Absinthe.Blueprint.Input.String do
  @moduledoc false

  # Input node for a string literal.

  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:value]
  defstruct value: nil,
            source_location: nil,
            # Populated by later phases
            flags: %{},
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          value: String.t(),
          flags: Blueprint.flags_t(),
          schema_node: nil | Absinthe.Type.t(),
          source_location: Blueprint.SourceLocation.t(),
          errors: [Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Input.Enum do
  @moduledoc false

  # Input node for an enum literal.

  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:value, :source_location]
  defstruct value: nil,
            source_location: nil,
            # Populated by later phases
            flags: %{},
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          value: String.t(),
          flags: Blueprint.flags_t(),
          schema_node: nil | Absinthe.Type.t(),
          source_location: Blueprint.SourceLocation.t(),
          errors: [Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Input.Value do
  @moduledoc false

  # An input in a document.
  #
  # Used by arguments, input object fields, and input lists.

  alias Absinthe.Blueprint.Input

  @enforce_keys [:raw, :normalized]
  defstruct [
    :schema_node,
    :raw,
    :normalized,
    :data
  ]

  @type variable :: Input.Variable.t()
  @type literals ::
          Input.Integer.t()
          | Input.Float.t()
          | Input.Enum.t()
          | Input.String.t()
          | Input.Boolean.t()
          | Input.List.t()
          | Input.Object.t()
          | variable

  @type t :: %__MODULE__{
          raw: Input.RawValue.t(),
          normalized: literals,
          data: term
        }

  @doc false
  # Whether a value is valid and useful in an argument: an explicit null is
  # valid, a missing normalized value is not, anything else is valid.
  @spec valid?(t) :: boolean
  def valid?(%{normalized: normalized}) do
    case normalized do
      %Absinthe.Blueprint.Input.Null{} -> true
      nil -> false
      _ -> true
    end
  end
end
defmodule Absinthe.Blueprint.Input.Integer do
  @moduledoc false

  # Input node for an integer literal.

  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:value]
  defstruct value: nil,
            source_location: nil,
            # Populated by later phases
            flags: %{},
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          value: integer,
          flags: Blueprint.flags_t(),
          source_location: Blueprint.SourceLocation.t(),
          schema_node: nil | Absinthe.Type.t(),
          errors: [Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Input.Variable do
  @moduledoc false

  # Input node for a `$variable` reference.

  alias __MODULE__
  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:name]
  defstruct name: nil,
            source_location: nil,
            # Populated by later phases
            flags: %{},
            errors: []

  @type t :: %__MODULE__{
          name: String.t(),
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Phase.Error.t()]
        }

  @doc """
  Generate a use reference for a variable.
  """
  @spec to_use(t) :: Variable.Use.t()
  def to_use(%__MODULE__{name: name, source_location: source_location}) do
    %Variable.Use{
      name: name,
      source_location: source_location
    }
  end
end
defmodule Absinthe.Blueprint.TypeReference.List do
  @moduledoc false

  # A list wrapper around another type reference.

  alias Absinthe.Blueprint

  @enforce_keys [:of_type]
  defstruct of_type: nil,
            errors: []

  @type t :: %__MODULE__{
          of_type: Blueprint.TypeReference.t(),
          errors: [Absinthe.Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.TypeReference.Identifier do
  @moduledoc false

  # A type reference by internal identifier (atom).

  alias Absinthe.Phase

  @enforce_keys [:id]
  defstruct id: nil,
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          id: any(),
          errors: [Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.TypeReference.NonNull do
  @moduledoc false

  # A non-null wrapper around another type reference.

  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:of_type]
  defstruct of_type: nil,
            errors: []

  @type t :: %__MODULE__{
          of_type: Blueprint.TypeReference.t(),
          errors: [Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.TypeReference.Name do
  @moduledoc false

  # A type reference by GraphQL name (string).

  alias Absinthe.Phase

  @enforce_keys [:name]
  defstruct name: nil,
            schema_node: nil,
            errors: []

  @type t :: %__MODULE__{
          name: String.t(),
          errors: [Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Execution do
  @moduledoc """
  Blueprint Execution Data

  The `%Absinthe.Blueprint.Execution{}` struct holds on to the core values that
  drive a document's execution.

  Here's how the execution flow works. Given a document like:

  ```
  {
    posts {
      title
      author { name }
    }
  }
  ```

  After all the validation happens, and we're actually going to execute this document,
  an `%Execution{}` struct is created. This struct is passed to each plugin's
  `before_resolution` callback, so that plugins can set initial values in the accumulator
  or context.

  Then the resolution phase walks the document until it hits the `posts` field.
  To resolve the posts field, an `%Absinthe.Resolution{}` struct is created from
  the `%Execution{}` struct. This resolution struct powers the normal middleware
  resolution process. When a field has resolved, the `:acc`, `:context`, and `:field_cache`
  values within the resolution struct are pulled out and used to update the execution.
  """

  alias Absinthe.Phase

  @type acc :: map

  # Fix: the original listed `:root_value` twice (once bare, once with a `%{}`
  # default); a single entry with the default is kept.
  defstruct [
    :adapter,
    :schema,
    fragments: %{},
    fields_cache: %{},
    validation_errors: [],
    result: nil,
    acc: %{},
    context: %{},
    root_value: %{}
  ]

  @type t :: %__MODULE__{
          validation_errors: [Phase.Error.t()],
          result: nil | Absinthe.Blueprint.Result.Object.t(),
          acc: acc
        }

  @type node_t ::
          Absinthe.Blueprint.Result.Object
          | Absinthe.Blueprint.Result.List
          | Absinthe.Blueprint.Result.Leaf

  @doc false
  # Build (or reuse) the execution struct held by a blueprint, seeding the
  # root result object from the root value on first use.
  def get(%{execution: %{result: nil} = exec} = bp_root, operation) do
    result = %Absinthe.Blueprint.Result.Object{
      root_value: exec.root_value,
      emitter: operation
    }

    %{
      exec
      | result: result,
        adapter: bp_root.adapter,
        schema: bp_root.schema,
        fragments: Map.new(bp_root.fragments, &{&1.name, &1})
    }
  end

  def get(%{execution: exec}, _) do
    exec
  end

  @doc false
  # Return the current result, building the root result object when execution
  # has not produced one yet.
  def get_result(%__MODULE__{result: nil, root_value: root_value}, operation) do
    %Absinthe.Blueprint.Result.Object{
      root_value: root_value,
      emitter: operation
    }
  end

  # Fix: the original declared this fallback clause with arity 3, so calling
  # `get_result/2` with an existing result raised FunctionClauseError.
  def get_result(%{result: result}, _) do
    result
  end

  @doc false
  # Fold a finished resolution's context, result, and accumulator back in.
  def update(resolution, result, context, acc) do
    %{resolution | context: context, result: result, acc: acc}
  end
end
defmodule Absinthe.Blueprint.SourceLocation do
  @moduledoc false

  # Line/column coordinates attached to blueprint nodes.

  @enforce_keys [:line, :column]
  defstruct line: nil,
            column: nil

  @type t :: %__MODULE__{
          line: pos_integer,
          column: pos_integer
        }

  @doc """
  Generate a `SourceLocation.t()` given a location
  """
  @spec at(loc :: Absinthe.Language.loc_t()) :: t
  def at(%{line: line, column: column}), do: at(line, column)

  @doc """
  Generate a `SourceLocation.t()` given line and column numbers
  """
  @spec at(line :: pos_integer, column :: pos_integer) :: t
  def at(line, column), do: %__MODULE__{line: line, column: column}
end
defmodule Absinthe.Blueprint.Document.VariableDefinition do
  @moduledoc false

  # A `$variable: Type = default` definition on an operation.

  alias Absinthe.{Blueprint, Type}

  @enforce_keys [:name, :type]
  defstruct name: nil,
            type: nil,
            default_value: nil,
            source_location: nil,
            # Populated by later phases
            flags: %{},
            provided_value: nil,
            errors: [],
            schema_node: nil

  @type t :: %__MODULE__{
          name: String.t(),
          type: Blueprint.TypeReference.t(),
          default_value: Blueprint.Input.t(),
          source_location: nil | Blueprint.SourceLocation.t(),
          provided_value: nil | Blueprint.Input.t(),
          errors: [Absinthe.Phase.Error.t()],
          flags: Blueprint.flags_t(),
          schema_node: Type.t()
        }
end
defmodule Absinthe.Blueprint.Document.Operation do
  @moduledoc false

  alias Absinthe.Blueprint

  @enforce_keys [:name, :type]
  defstruct [
    :name,
    :type,
    current: false,
    selections: [],
    directives: [],
    variable_definitions: [],
    variable_uses: [],
    fragment_uses: [],
    source_location: nil,
    # Populated by phases
    flags: %{},
    schema_node: nil,
    complexity: nil,
    provided_values: %{},
    errors: []
  ]

  @type t :: %__MODULE__{
          name: nil | String.t(),
          type: :query | :mutation | :subscription,
          current: boolean,
          directives: [Blueprint.Directive.t()],
          selections: [Blueprint.Document.selection_t()],
          variable_definitions: [Blueprint.Document.VariableDefinition.t()],
          variable_uses: [Blueprint.Input.Variable.Use.t()],
          fragment_uses: [Blueprint.Document.Fragment.Named.Use.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          schema_node: nil | Absinthe.Type.Object.t(),
          complexity: nil | non_neg_integer,
          provided_values: %{String.t() => nil | Blueprint.Input.t()},
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }

  @doc """
  Determine if a fragment or variable is used by an operation.
  """
  @spec uses?(t, Blueprint.node_t()) :: boolean
  def uses?(op, %Blueprint.Document.Fragment.Named{} = node) do
    do_uses?(op.fragment_uses, node)
  end

  def uses?(op, %Blueprint.Input.Variable{} = node) do
    do_uses?(op.variable_uses, node)
  end

  # Whether a node is marked as used in a use list.
  # Fix: use Enum.any?/2 so the result is a boolean, as the spec declares;
  # Enum.find/2 returned the matching use struct (or nil) instead.
  @spec do_uses?([Blueprint.use_t()], Blueprint.node_t()) :: boolean
  defp do_uses?(list, node) do
    Enum.any?(list, &(&1.name == node.name))
  end
end
defmodule Absinthe.Blueprint.Document.Field do
  @moduledoc false

  # A field selection within a document.

  alias Absinthe.{Blueprint, Phase, Type}

  @enforce_keys [:name]
  defstruct name: nil,
            alias: nil,
            selections: [],
            arguments: [],
            argument_data: %{},
            directives: [],
            # Populated by later phases
            flags: %{},
            errors: [],
            source_location: nil,
            type_conditions: [],
            schema_node: nil,
            complexity: nil

  @type t :: %__MODULE__{
          name: String.t(),
          selections: [Blueprint.Document.selection_t()],
          arguments: [Blueprint.Input.Argument.t()],
          directives: [Blueprint.Directive.t()],
          flags: Blueprint.flags_t(),
          errors: [Phase.Error.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          type_conditions: [Blueprint.TypeReference.Name],
          schema_node: Type.t(),
          complexity: nil | non_neg_integer
        }
end
defmodule Absinthe.Blueprint.Document.Fragment.Named do
  @moduledoc false

  # A `fragment Name on Type { ... }` definition.

  alias Absinthe.Blueprint
  alias __MODULE__

  @enforce_keys [:name, :type_condition]
  defstruct name: nil,
            type_condition: nil,
            selections: [],
            directives: [],
            source_location: nil,
            # Populated by later phases
            schema_node: nil,
            complexity: nil,
            flags: %{},
            errors: []

  @type t :: %__MODULE__{
          directives: [Blueprint.Directive.t()],
          errors: [Absinthe.Phase.Error.t()],
          name: String.t(),
          selections: [Blueprint.Document.selection_t()],
          schema_node: nil | Absinthe.Type.t(),
          source_location: nil | Blueprint.SourceLocation.t(),
          flags: Blueprint.flags_t(),
          type_condition: Blueprint.TypeReference.Name.t()
        }

  @doc """
  Generate a use reference for a fragment.
  """
  @spec to_use(t) :: Named.Use.t()
  def to_use(%__MODULE__{name: name, source_location: source_location}) do
    %Named.Use{
      name: name,
      source_location: source_location
    }
  end
end
defmodule Absinthe.Blueprint.Document.Fragment.Named.Use do
  @moduledoc false

  # Records that a named fragment is referenced at a location.

  alias Absinthe.Blueprint

  @enforce_keys [:name, :source_location]
  defstruct name: nil,
            source_location: nil

  @type t :: %__MODULE__{
          name: String.t(),
          source_location: nil | Blueprint.SourceLocation.t()
        }
end
defmodule Absinthe.Blueprint.Document.Fragment.Inline do
  @moduledoc false

  # An inline `... on Type { ... }` fragment.

  alias Absinthe.Blueprint

  @enforce_keys [:type_condition]
  defstruct type_condition: nil,
            selections: [],
            directives: [],
            source_location: nil,
            # Populated by later phases
            schema_node: nil,
            complexity: nil,
            flags: %{},
            errors: []

  @type t :: %__MODULE__{
          directives: [Blueprint.Directive.t()],
          errors: [Absinthe.Phase.Error.t()],
          flags: Blueprint.flags_t(),
          selections: [Blueprint.Document.selection_t()],
          schema_node: nil | Absinthe.Type.t(),
          source_location: nil | Blueprint.SourceLocation.t(),
          type_condition: Blueprint.TypeReference.Name.t()
        }
end
defmodule Absinthe.Blueprint.Document.Fragment.Spread do
  @moduledoc false

  # A `...FragmentName` spread within a selection set.

  alias Absinthe.Blueprint

  @enforce_keys [:name]
  defstruct name: nil,
            directives: [],
            source_location: nil,
            # Populated by later phases
            complexity: nil,
            flags: %{},
            errors: []

  @type t :: %__MODULE__{
          directives: [Blueprint.Directive.t()],
          errors: [Absinthe.Phase.Error.t()],
          name: String.t(),
          flags: Blueprint.flags_t(),
          source_location: nil | Blueprint.SourceLocation.t()
        }
end
defmodule Absinthe.Blueprint.Document.Fragment do
@moduledoc false
alias __MODULE__
# Convenience union of the three fragment node flavors in a document.
@type t ::
Fragment.Inline.t()
| Fragment.Named.t()
| Fragment.Spread.t()
end
defmodule Absinthe.Blueprint.Schema.Deprecation do
  @moduledoc false

  # Why a schema member was deprecated; `nil` when no reason was given.
  defstruct [:reason]

  @type t :: %__MODULE__{
          reason: nil | String.t()
        }
end
defmodule Absinthe.Blueprint.Schema.EnumValueDefinition do
  @moduledoc false

  alias Absinthe.Blueprint

  @enforce_keys [:value]
  # Fix: the original listed `deprecation: nil` and `source_location: nil`
  # twice each in the defstruct; duplicates removed.
  defstruct [
    :value,
    :name,
    :identifier,
    deprecation: nil,
    directives: [],
    source_location: nil,
    description: nil,
    # Added by phases
    flags: %{},
    module: nil,
    errors: [],
    __reference__: nil
  ]

  @type t :: %__MODULE__{
          value: String.t(),
          description: nil | String.t(),
          deprecation: nil | Blueprint.Schema.Deprecation.t(),
          directives: [Blueprint.Directive.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Schema.FieldDefinition do
  @moduledoc false

  alias Absinthe.Blueprint

  @enforce_keys [:name]
  # Fix: the original listed `description: nil` twice in the defstruct;
  # the duplicate is removed.
  defstruct [
    :name,
    :identifier,
    :type,
    :module,
    description: nil,
    deprecation: nil,
    config: nil,
    triggers: [],
    default_value: nil,
    arguments: [],
    directives: [],
    complexity: nil,
    source_location: nil,
    middleware: [],
    function_ref: nil,
    flags: %{},
    errors: [],
    __reference__: nil,
    __private__: []
  ]

  # Fix: the `description` key was also duplicated in this type map.
  @type t :: %__MODULE__{
          name: String.t(),
          identifier: atom,
          description: nil | String.t(),
          deprecation: nil | Blueprint.Schema.Deprecation.t(),
          arguments: [Blueprint.Schema.InputValueDefinition.t()],
          type: Blueprint.TypeReference.t(),
          directives: [Blueprint.Directive.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by DSL
          middleware: [any],
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }

  @doc false
  # Struct keys that hold (possibly inlinable) functions.
  def functions(), do: [:config, :complexity, :middleware, :triggers]
end
defmodule Absinthe.Blueprint.Schema.InputObjectTypeDefinition do
  @moduledoc false

  # Blueprint node for an `input` type definition.

  alias Absinthe.{Blueprint, Type}

  @enforce_keys [:name]
  defstruct identifier: nil,
            name: nil,
            module: nil,
            description: nil,
            interfaces: [],
            fields: [],
            imports: [],
            directives: [],
            source_location: nil,
            # Populated by later phases
            flags: %{},
            errors: [],
            __reference__: nil,
            __private__: []

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          fields: [Blueprint.Schema.InputValueDefinition.t()],
          directives: [Blueprint.Directive.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }

  # Convert this blueprint definition into a concrete `Absinthe.Type.InputObject`.
  def build(type_def, schema) do
    %Type.InputObject{
      identifier: type_def.identifier,
      name: type_def.name,
      fields: build_fields(type_def, schema),
      description: type_def.description,
      definition: type_def.module
    }
  end

  # Build the identifier => %Type.Field{} map for this input object.
  def build_fields(type_def, schema) do
    Map.new(type_def.fields, fn field_def ->
      field = %Type.Field{
        identifier: field_def.identifier,
        deprecation: field_def.deprecation,
        description: field_def.description,
        name: field_def.name,
        type: Blueprint.TypeReference.to_type(field_def.type, schema),
        definition: type_def.module,
        __reference__: field_def.__reference__,
        __private__: field_def.__private__,
        default_value: field_def.default_value
      }

      {field.identifier, field}
    end)
  end
end
defmodule Absinthe.Blueprint.Schema.ObjectTypeDefinition do
  @moduledoc false

  alias Absinthe.{Blueprint, Type}

  @enforce_keys [:name]
  defstruct [
    :name,
    :identifier,
    :module,
    description: nil,
    interfaces: [],
    fields: [],
    directives: [],
    is_type_of: nil,
    source_location: nil,
    # Added by phases
    flags: %{},
    imports: [],
    errors: [],
    __reference__: nil,
    __private__: []
  ]

  @type t :: %__MODULE__{
          name: String.t(),
          identifier: atom,
          description: nil | String.t(),
          fields: [Blueprint.Schema.FieldDefinition.t()],
          interfaces: [String.t()],
          directives: [Blueprint.Directive.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()],
          __private__: Keyword.t()
        }

  @doc false
  # Struct keys that hold (possibly inlinable) functions.
  def functions(), do: [:is_type_of]

  # Convert this blueprint definition into a concrete `Absinthe.Type.Object`.
  def build(type_def, schema) do
    %Type.Object{
      identifier: type_def.identifier,
      name: type_def.name,
      description: type_def.description,
      fields: build_fields(type_def, schema),
      interfaces: type_def.interfaces,
      definition: type_def.module,
      is_type_of: type_def.is_type_of
    }
  end

  # Build the identifier => %Type.Field{} map for this object type.
  def build_fields(type_def, schema) do
    for field_def <- type_def.fields, into: %{} do
      field = %Type.Field{
        identifier: field_def.identifier,
        middleware: field_def.middleware,
        deprecation: field_def.deprecation,
        description: field_def.description,
        complexity: field_def.complexity,
        # Fix: the original assigned `field_def.complexity` here as well,
        # clobbering the field's subscription config with its complexity.
        config: field_def.config,
        triggers: field_def.triggers,
        name: field_def.name,
        type: Blueprint.TypeReference.to_type(field_def.type, schema),
        args: build_args(field_def, schema),
        definition: field_def.module,
        __reference__: field_def.__reference__,
        __private__: field_def.__private__
      }

      {field.identifier, field}
    end
  end

  # Build the identifier => %Type.Argument{} map for a field definition.
  def build_args(field_def, schema) do
    Map.new(field_def.arguments, fn arg_def ->
      arg = %Type.Argument{
        identifier: arg_def.identifier,
        name: arg_def.name,
        description: arg_def.description,
        type: Blueprint.TypeReference.to_type(arg_def.type, schema),
        default_value: arg_def.default_value,
        deprecation: arg_def.deprecation
      }

      {arg_def.identifier, arg}
    end)
  end
end
defmodule Absinthe.Blueprint.Schema.EnumTypeDefinition do
  @moduledoc false

  # Blueprint node for an `enum` type definition.

  alias Absinthe.Blueprint

  @enforce_keys [:name]
  defstruct name: nil,
            identifier: nil,
            description: nil,
            module: nil,
            values: [],
            directives: [],
            source_location: nil,
            # Populated by later phases
            flags: %{},
            errors: [],
            __reference__: nil,
            __private__: []

  @type t :: %__MODULE__{
          name: String.t(),
          values: [Blueprint.Schema.EnumValueDefinition.t()],
          directives: [Blueprint.Directive.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }

  # Convert this blueprint definition into a concrete `Absinthe.Type.Enum`,
  # indexing its values three ways for fast lookup.
  def build(type_def, _schema) do
    %Absinthe.Type.Enum{
      identifier: type_def.identifier,
      name: type_def.name,
      values: values_by(type_def, :identifier),
      values_by_internal_value: values_by(type_def, :value),
      values_by_name: values_by(type_def, :name),
      definition: type_def.module,
      description: type_def.description
    }
  end

  # Build a map of `key`-field => %Type.Enum.Value{} for this enum's values.
  def values_by(type_def, key) do
    Map.new(type_def.values, fn value_def ->
      value = %Absinthe.Type.Enum.Value{
        name: value_def.name,
        value: value_def.value,
        __reference__: value_def.__reference__,
        description: value_def.description,
        deprecation: value_def.deprecation
      }

      {Map.fetch!(value_def, key), value}
    end)
  end
end
defmodule Absinthe.Blueprint.Schema.InterfaceTypeDefinition do
  @moduledoc false

  alias Absinthe.Blueprint

  @enforce_keys [:name]
  defstruct [
    :identifier,
    :name,
    :module,
    description: nil,
    fields: [],
    directives: [],
    source_location: nil,
    # Added by phases
    flags: %{},
    errors: [],
    resolve_type: nil,
    imports: [],
    __reference__: nil,
    __private__: []
  ]

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          fields: [Blueprint.Schema.FieldDefinition.t()],
          directives: [Blueprint.Directive.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }

  # Converts this blueprint interface definition into its runtime
  # `%Absinthe.Type.Interface{}` artifact.
  def build(type_def, _schema) do
    %Absinthe.Type.Interface{
      name: type_def.name,
      identifier: type_def.identifier,
      description: type_def.description,
      definition: type_def.module,
      resolve_type: type_def.resolve_type,
      fields: build_fields(type_def)
    }
  end

  # Builds the runtime field map by copying every attribute of each field
  # definition straight onto an `%Absinthe.Type.Field{}` struct.
  def build_fields(type_def) do
    Map.new(type_def.fields, fn field_def ->
      field = struct(Absinthe.Type.Field, Map.from_struct(field_def))
      {field.identifier, field}
    end)
  end

  @doc false
  def functions(), do: [:resolve_type]
end
defmodule Absinthe.Blueprint.Schema.InputValueDefinition do
  @moduledoc false

  alias Absinthe.Blueprint

  # Blueprint node for an input value: either a field argument or a field of
  # an input object type (see `:placement` below).
  defstruct [
    :name,
    :identifier,
    :type,
    :module,
    # InputValueDefinitions can have different placements depending on whether
    # they model an argument definition or a value of an input object type
    # definition
    placement: :argument_definition,
    description: nil,
    default_value: nil,
    directives: [],
    source_location: nil,
    # Added by phases
    flags: %{},
    errors: [],
    __reference__: nil,
    __private__: [],
    deprecation: nil
  ]

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          type: Blueprint.TypeReference.t(),
          default_value: Blueprint.Input.t(),
          directives: [Blueprint.Directive.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # The struct module of the parent
          placement: :argument_definition | :input_field_definition,
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Schema.UnionTypeDefinition do
  @moduledoc false

  alias Absinthe.Blueprint

  @enforce_keys [:name]
  defstruct [
    :identifier,
    :name,
    :module,
    description: nil,
    resolve_type: nil,
    directives: [],
    types: [],
    source_location: nil,
    # Added by phases
    flags: %{},
    errors: [],
    __reference__: nil,
    __private__: []
  ]

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          directives: [Blueprint.Directive.t()],
          types: [Blueprint.TypeReference.Name.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }

  # Converts this blueprint union definition into its runtime
  # `%Absinthe.Type.Union{}` artifact. Member types are sorted to keep the
  # artifact deterministic.
  def build(type_def, _schema) do
    sorted_types = Enum.sort(type_def.types)

    %Absinthe.Type.Union{
      name: type_def.name,
      identifier: type_def.identifier,
      description: type_def.description,
      definition: type_def.module,
      resolve_type: type_def.resolve_type,
      types: sorted_types
    }
  end

  @doc false
  def functions(), do: [:resolve_type]
end
defmodule Absinthe.Blueprint.Schema.SchemaDefinition do
  @moduledoc false

  alias Absinthe.Blueprint

  # Container node for one `schema` block: holds the collected type and
  # directive definitions and, after later phases run, the built artifacts.
  defstruct description: nil,
            module: nil,
            type_definitions: [],
            directive_definitions: [],
            type_artifacts: [],
            directive_artifacts: [],
            type_extensions: [],
            directives: [],
            source_location: nil,
            # Added by phases
            flags: %{},
            imports: [],
            errors: [],
            __private__: []

  @type t :: %__MODULE__{
          description: nil | String.t(),
          # types: [Blueprint.Schema.FieldDefinition.t],
          directives: [Blueprint.Directive.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }
end
defmodule Absinthe.Blueprint.Schema.DirectiveDefinition do
  @moduledoc false

  alias Absinthe.Blueprint

  @enforce_keys [:name]
  defstruct [
    :name,
    :module,
    :identifier,
    description: nil,
    directives: [],
    arguments: [],
    locations: [],
    source_location: nil,
    expand: nil,
    errors: [],
    __reference__: nil,
    __private__: []
  ]

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil,
          arguments: [Blueprint.Schema.InputValueDefinition.t()],
          locations: [String.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          errors: [Absinthe.Phase.Error.t()]
        }

  # Converts this blueprint directive definition into its runtime
  # `%Absinthe.Type.Directive{}` artifact. Argument building is shared with
  # object types; locations are sorted to keep the artifact deterministic.
  def build(type_def, schema) do
    built_args = Blueprint.Schema.ObjectTypeDefinition.build_args(type_def, schema)

    %Absinthe.Type.Directive{
      name: type_def.name,
      identifier: type_def.identifier,
      description: type_def.description,
      definition: type_def.module,
      args: built_args,
      locations: Enum.sort(type_def.locations),
      expand: type_def.expand
    }
  end

  @doc false
  def functions(), do: [:expand]
end
defmodule Absinthe.Blueprint.Schema.ScalarTypeDefinition do
  @moduledoc false

  alias Absinthe.Blueprint

  @enforce_keys [:name]
  defstruct [
    :name,
    :identifier,
    :module,
    description: nil,
    parse: nil,
    serialize: nil,
    directives: [],
    source_location: nil,
    # Added by phases
    flags: %{},
    errors: [],
    __reference__: nil,
    __private__: []
  ]

  @type t :: %__MODULE__{
          name: String.t(),
          description: nil | String.t(),
          directives: [Blueprint.Directive.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          # Added by phases
          flags: Blueprint.flags_t(),
          errors: [Absinthe.Phase.Error.t()]
        }

  # Converts this blueprint scalar definition into its runtime
  # `%Absinthe.Type.Scalar{}` artifact, carrying over the user-supplied
  # parse/serialize functions untouched.
  def build(type_def, _schema) do
    %Absinthe.Type.Scalar{
      name: type_def.name,
      identifier: type_def.identifier,
      description: type_def.description,
      definition: type_def.module,
      parse: type_def.parse,
      serialize: type_def.serialize
    }
  end

  @doc false
  def functions(), do: [:serialize, :parse]
end
defprotocol Absinthe.Blueprint.Draft do
  @moduledoc false

  # Converts a draft node into its final blueprint form, resolving any
  # references against `root`.
  def convert(node, root)
end
defimpl Absinthe.Blueprint.Draft, for: List do
  # A list is converted element-by-element against the same root.
  def convert(nodes, root) do
    for node <- nodes, do: Absinthe.Blueprint.Draft.convert(node, root)
  end
end
defimpl Absinthe.Blueprint.Draft, for: Atom do
  # Atoms are already in their final form; conversion is the identity.
  def convert(atom, _root), do: atom
end
defmodule Absinthe.Blueprint.Schema do
  @moduledoc false

  alias __MODULE__

  @type type_t ::
          Schema.EnumTypeDefinition.t()
          | Schema.InputObjectTypeDefinition.t()
          | Schema.InterfaceTypeDefinition.t()
          | Schema.ObjectTypeDefinition.t()
          | Schema.ScalarTypeDefinition.t()
          | Schema.UnionTypeDefinition.t()

  @type t :: type_t | Schema.DirectiveDefinition.t()

  @doc """
  Lookup a type definition that is part of a schema.
  """
  # NOTE(review): the spec references `Blueprint.t()` but this module only
  # aliases `__MODULE__` — presumably it resolves via the fully-qualified
  # `Absinthe.Blueprint`; confirm.
  @spec lookup_type(Blueprint.t(), atom) :: nil | Blueprint.Schema.type_t()
  def lookup_type(blueprint, identifier) do
    # Only the first schema definition is searched.
    blueprint.schema_definitions
    |> List.first()
    |> Map.get(:type_definitions)
    |> Enum.find(fn
      %{identifier: ^identifier} ->
        true

      _ ->
        false
    end)
  end

  # Returns the list of function-carrying attribute names a definition module
  # declares via `functions/0`, or `[]` when it declares none.
  def functions(module) do
    if function_exported?(module, :functions, 0) do
      module.functions
    else
      []
    end
  end

  # Entry point: folds the flat instruction list emitted by the schema
  # notation macros into the blueprint, using a stack of "open" definitions.
  # Structs open a scope, `{attr, value}` tuples modify the top of the stack,
  # and `:close` pops a finished definition into its parent.
  def build([%Absinthe.Blueprint{} = bp | attrs]) do
    build_types(attrs, [bp])
  end

  # All instructions consumed; only the blueprint remains. Restore original
  # ordering (everything was accumulated by prepending).
  defp build_types([], [bp]) do
    Map.update!(bp, :schema_definitions, &Enum.reverse/1)
  end

  defp build_types([%Schema.SchemaDefinition{} = schema | rest], stack) do
    build_types(rest, [schema | stack])
  end

  # Definition structs that simply open a new scope on the stack.
  @simple_open [
    Schema.ScalarTypeDefinition,
    Schema.ObjectTypeDefinition,
    Schema.FieldDefinition,
    Schema.EnumTypeDefinition,
    Schema.DirectiveDefinition,
    Schema.InputObjectTypeDefinition,
    Schema.InterfaceTypeDefinition,
    Schema.UnionTypeDefinition,
    Schema.EnumValueDefinition
  ]

  defp build_types([%module{} = type | rest], stack) when module in @simple_open do
    build_types(rest, [type | stack])
  end

  defp build_types([{:import_fields, criterion} | rest], [obj | stack]) do
    build_types(rest, [push(obj, :imports, criterion) | stack])
  end

  defp build_types([{:desc, desc} | rest], [item | stack]) do
    build_types(rest, [%{item | description: desc} | stack])
  end

  # Middleware attaches to the currently-open field (the object stays below
  # it on the stack).
  defp build_types([{:middleware, middleware} | rest], [field, obj | stack]) do
    field = Map.update!(field, :middleware, &(middleware ++ &1))
    build_types(rest, [field, obj | stack])
  end

  defp build_types([{:config, config} | rest], [field | stack]) do
    field = %{field | config: config}
    build_types(rest, [field | stack])
  end

  defp build_types([{:trigger, trigger} | rest], [field | stack]) do
    field = Map.update!(field, :triggers, &[trigger | &1])
    build_types(rest, [field | stack])
  end

  defp build_types([{:interface, interface} | rest], [obj | stack]) do
    obj = Map.update!(obj, :interfaces, &[interface | &1])
    build_types(rest, [obj | stack])
  end

  defp build_types([{:__private__, private} | rest], [entity | stack]) do
    entity = Map.update!(entity, :__private__, &update_private(&1, private))
    build_types(rest, [entity | stack])
  end

  defp build_types([{:values, values} | rest], [enum | stack]) do
    enum = Map.update!(enum, :values, &(values ++ &1))
    build_types(rest, [enum | stack])
  end

  # An input value definition becomes an argument of the open field.
  defp build_types([%Schema.InputValueDefinition{} = arg | rest], [field | stack]) do
    build_types(rest, [push(field, :arguments, arg) | stack])
  end

  defp build_types([{:sdl, sdl_definitions} | rest], [schema | stack]) do
    # TODO: Handle directives, etc
    build_types(rest, [concat(schema, :type_definitions, sdl_definitions) | stack])
  end

  # Fallback for any other `{attr, value}` instruction: set the key directly
  # on the open entity. Must come after the more specific tuple clauses above.
  defp build_types([{attr, value} | rest], [entity | stack]) do
    entity = %{entity | attr => value}
    build_types(rest, [entity | stack])
  end

  defp build_types([:close | rest], [%Schema.EnumValueDefinition{} = value, enum | stack]) do
    build_types(rest, [push(enum, :values, value) | stack])
  end

  # Closing a field: restore middleware order, quote the accumulated triggers
  # into a map AST, and record a reference back to the owning object.
  defp build_types([:close | rest], [%Schema.FieldDefinition{} = field, obj | stack]) do
    field =
      field
      |> Map.update!(:middleware, &Enum.reverse/1)
      |> Map.update!(:triggers, &{:%{}, [], &1})
      |> Map.put(:function_ref, {obj.identifier, field.identifier})

    build_types(rest, [push(obj, :fields, field) | stack])
  end

  defp build_types([:close | rest], [%Schema.ObjectTypeDefinition{} = obj, schema | stack]) do
    obj = Map.update!(obj, :fields, &Enum.reverse/1)
    build_types(rest, [push(schema, :type_definitions, obj) | stack])
  end

  defp build_types([:close | rest], [%Schema.InputObjectTypeDefinition{} = obj, schema | stack]) do
    obj = Map.update!(obj, :fields, &Enum.reverse/1)
    build_types(rest, [push(schema, :type_definitions, obj) | stack])
  end

  defp build_types([:close | rest], [%Schema.InterfaceTypeDefinition{} = iface, schema | stack]) do
    iface = Map.update!(iface, :fields, &Enum.reverse/1)
    build_types(rest, [push(schema, :type_definitions, iface) | stack])
  end

  defp build_types([:close | rest], [%Schema.UnionTypeDefinition{} = union, schema | stack]) do
    build_types(rest, [push(schema, :type_definitions, union) | stack])
  end

  defp build_types([:close | rest], [%Schema.DirectiveDefinition{} = dir, schema | stack]) do
    build_types(rest, [push(schema, :directive_definitions, dir) | stack])
  end

  # Definitions that close with no child bookkeeping.
  @simple_close [
    Schema.ScalarTypeDefinition,
    Schema.EnumTypeDefinition
  ]

  defp build_types([:close | rest], [%module{} = type, schema | stack])
       when module in @simple_close do
    schema = push(schema, :type_definitions, type)
    build_types(rest, [schema | stack])
  end

  defp build_types([:close | rest], [%Schema.SchemaDefinition{} = schema, bp]) do
    bp = push(bp, :schema_definitions, schema)
    build_types(rest, [bp])
  end

  # Prepends `value` onto the list stored under `key`.
  defp push(entity, key, value) do
    Map.update!(entity, key, &[value | &1])
  end

  defp concat(entity, key, value) do
    Map.update!(entity, key, &(&1 ++ value))
  end

  # Merges new private data into the existing keyword list; `:meta` entries
  # are merged key-by-key, everything else is replaced outright.
  defp update_private(existing_private, private) do
    Keyword.merge(existing_private, private, fn
      :meta, v1, v2 ->
        Keyword.merge(v1, v2)

      _, _, v2 ->
        v2
    end)
  end
end
defmodule Absinthe.Blueprint.Input do
  @moduledoc false

  alias Absinthe.Blueprint
  alias __MODULE__

  import Kernel, except: [inspect: 1]

  @type leaf ::
          Input.Integer.t()
          | Input.Float.t()
          | Input.Enum.t()
          | Input.String.t()
          | Input.Variable.t()
          | Input.Boolean.t()
          | Input.Null.t()

  @type collection ::
          Blueprint.Input.List.t()
          | Input.Object.t()

  @type t :: leaf | collection | Input.Value.t() | Input.Argument.t()

  @parse_types [
    Input.Boolean,
    Input.Enum,
    Input.Field,
    Input.Float,
    Input.Integer,
    Input.List,
    Input.Object,
    Input.String,
    Input.Null
  ]

  @spec parse(any) :: nil | t
  # Wraps a plain Elixir term in the corresponding blueprint input node.
  # Terms that are already input nodes pass through untouched.
  def parse(%struct{} = node) when struct in @parse_types, do: node

  def parse(value) when is_integer(value), do: %Input.Integer{value: value}

  def parse(value) when is_float(value), do: %Input.Float{value: value}

  def parse(value) when is_nil(value), do: %Input.Null{}

  # Note: The value may actually be an Enum value and may
  # need to be manually converted, based on the schema.
  def parse(value) when is_binary(value), do: %Input.String{value: value}

  def parse(value) when is_boolean(value), do: %Input.Boolean{value: value}

  def parse(values) when is_list(values) do
    items = for item <- values, do: %Input.RawValue{content: parse(item)}
    %Input.List{items: items}
  end

  def parse(map) when is_map(map) do
    fields =
      for {name, field_value} <- map do
        %Input.Field{
          name: name,
          input_value: %Input.RawValue{content: parse(field_value)}
        }
      end

    %Input.Object{fields: fields}
  end

  @simple_inspect_types [
    Input.Boolean,
    Input.Float,
    Input.Integer,
    Input.String
  ]

  @spec inspect(t) :: String.t()
  # Renders an input node back into GraphQL-literal-style text.
  def inspect(%str{} = node) when str in @simple_inspect_types do
    Kernel.inspect(node.value)
  end

  def inspect(%Input.Enum{value: value}), do: value

  def inspect(%Input.List{items: items}) do
    contents = Enum.map_join(items, ", ", &inspect/1)
    "[#{contents}]"
  end

  def inspect(%Input.Object{fields: fields}) do
    # Fields whose value was never normalized (raw: nil) or whose content is
    # missing are omitted from the rendering.
    contents =
      fields
      |> Enum.filter(fn %{input_value: input} ->
        case input do
          %Input.RawValue{content: content} -> content
          %Input.Value{raw: nil} -> false
          %Input.Value{raw: %{content: content}} -> content
        end
      end)
      |> Enum.map_join(", ", &inspect/1)

    "{#{contents}}"
  end

  def inspect(%Input.Field{name: name, input_value: input_value}) do
    name <> ": " <> inspect(input_value)
  end

  def inspect(%Input.Value{raw: raw}), do: inspect(raw)

  def inspect(%Input.RawValue{content: content}), do: inspect(content)

  def inspect(%Input.Variable{name: name}), do: "$" <> name

  def inspect(%Input.Null{}), do: "null"

  def inspect(nil), do: "null"

  def inspect(other), do: Kernel.inspect(other)
end
defmodule Absinthe.Blueprint.Transform do
  @moduledoc false

  alias Absinthe.Blueprint

  @doc """
  Apply `fun` to a node, then walk to its children and do the same
  """
  @spec prewalk(
          Blueprint.node_t(),
          (Blueprint.node_t() -> Blueprint.node_t() | {:halt, Blueprint.node_t()})
        ) :: Blueprint.node_t()
  def prewalk(node, fun) when is_function(fun, 1) do
    # Adapt the arity-1 fun to the accumulator-based prewalk/3 with a nil acc.
    {node, _} =
      prewalk(node, nil, fn x, nil ->
        case fun.(x) do
          {:halt, x} -> {:halt, x, nil}
          x -> {x, nil}
        end
      end)

    node
  end

  @doc """
  Same as `prewalk/2` but takes and returns an accumulator

  The supplied function must be arity 2.
  """
  @spec prewalk(
          Blueprint.node_t(),
          acc,
          (Blueprint.node_t(), acc ->
             {Blueprint.node_t(), acc} | {:halt, Blueprint.node_t(), acc})
        ) :: {Blueprint.node_t(), acc}
        when acc: var
  def prewalk(node, acc, fun) when is_function(fun, 2) do
    walk(node, acc, fun, &pass/2)
  end

  @doc """
  Apply `fun` to all children of a node, then apply `fun` to node
  """
  @spec postwalk(Blueprint.node_t(), (Blueprint.node_t() -> Blueprint.node_t())) ::
          Blueprint.node_t()
  def postwalk(node, fun) when is_function(fun, 1) do
    {node, _} = postwalk(node, nil, fn x, nil -> {fun.(x), nil} end)
    node
  end

  @doc """
  Same as `postwalk/2` but takes and returns an accumulator
  """
  @spec postwalk(Blueprint.node_t(), acc, (Blueprint.node_t(), acc -> {Blueprint.node_t(), acc})) ::
          {Blueprint.node_t(), acc}
        when acc: var
  def postwalk(node, acc, fun) when is_function(fun, 2) do
    walk(node, acc, &pass/2, fun)
  end

  # Identity step used when only one of pre/post is supplied.
  defp pass(x, acc), do: {x, acc}

  # Compile-time table: which struct fields hold child nodes for each
  # blueprint node type. Drives the generated `maybe_walk_children/4` clauses
  # below.
  nodes_with_children = %{
    Blueprint => [:fragments, :operations, :schema_definitions, :directives],
    Blueprint.Directive => [:arguments],
    Blueprint.Document.Field => [:selections, :arguments, :directives],
    Blueprint.Document.Operation => [:selections, :variable_definitions, :directives],
    Blueprint.TypeReference.List => [:of_type],
    Blueprint.TypeReference.NonNull => [:of_type],
    Blueprint.Document.Fragment.Inline => [:selections, :directives],
    Blueprint.Document.Fragment.Named => [:selections, :directives],
    Blueprint.Document.Fragment.Spread => [:directives],
    Blueprint.Document.VariableDefinition => [:type, :default_value],
    Blueprint.Input.Argument => [:input_value],
    Blueprint.Input.Field => [:input_value],
    Blueprint.Input.Object => [:fields],
    Blueprint.Input.List => [:items],
    Blueprint.Input.RawValue => [:content],
    Blueprint.Input.Value => [:normalized],
    Blueprint.Schema.DirectiveDefinition => [:directives, :arguments],
    Blueprint.Schema.EnumTypeDefinition => [:directives, :values],
    Blueprint.Schema.EnumValueDefinition => [:directives],
    Blueprint.Schema.FieldDefinition => [:type, :arguments, :directives],
    Blueprint.Schema.InputObjectTypeDefinition => [:interfaces, :fields, :directives],
    Blueprint.Schema.InputValueDefinition => [:type, :default_value, :directives],
    Blueprint.Schema.InterfaceTypeDefinition => [:fields, :directives],
    Blueprint.Schema.ObjectTypeDefinition => [:interfaces, :fields, :directives],
    Blueprint.Schema.ScalarTypeDefinition => [:directives],
    Blueprint.Schema.SchemaDefinition => [:directive_definitions, :type_definitions, :directives],
    Blueprint.Schema.UnionTypeDefinition => [:directives, :types]
  }

  @spec walk(
          Blueprint.node_t(),
          acc,
          (Blueprint.node_t(), acc ->
             {Blueprint.node_t(), acc} | {:halt, Blueprint.node_t(), acc}),
          (Blueprint.node_t(), acc -> {Blueprint.node_t(), acc})
        ) :: {Blueprint.node_t(), acc}
        when acc: var
  def walk(blueprint, acc, pre, post)

  def walk(nodes, acc, pre, post) when is_list(nodes) do
    Enum.map_reduce(nodes, acc, &walk(&1, &2, pre, post))
  end

  def walk(node, acc, pre, post) do
    # A {:halt, node, acc} from `pre` skips descending into children.
    {node, acc} =
      case pre.(node, acc) do
        {:halt, node, acc} ->
          {node, acc}

        {node, acc} ->
          maybe_walk_children(node, acc, pre, post)
      end

    post.(node, acc)
  end

  # Generate one clause per node type; nodes flagged `flat` skip their
  # :selections children (the extra clause is emitted first so it wins).
  for {node_name, children} <- nodes_with_children do
    if :selections in children do
      def maybe_walk_children(%unquote(node_name){flags: %{flat: _}} = node, acc, pre, post) do
        node_with_children(node, unquote(children -- [:selections]), acc, pre, post)
      end
    end

    def maybe_walk_children(%unquote(node_name){} = node, acc, pre, post) do
      node_with_children(node, unquote(children), acc, pre, post)
    end
  end

  # Leaf nodes: nothing to descend into.
  def maybe_walk_children(node, acc, _, _) do
    {node, acc}
  end

  defp node_with_children(node, children, acc, pre, post) do
    {node, acc} = walk_children(node, children, acc, pre, post)
    post.(node, acc)
  end

  # Walks each child-bearing key in turn, writing the transformed children
  # back onto the node.
  defp walk_children(node, children, acc, pre, post) do
    Enum.reduce(children, {node, acc}, fn child_key, {node, acc} ->
      {children, acc} =
        node
        |> Map.fetch!(child_key)
        |> walk(acc, pre, post)

      {Map.put(node, child_key, children), acc}
    end)
  end
end
defmodule Absinthe.Blueprint.Document do
  @moduledoc false

  alias Absinthe.Blueprint

  # Any node that can appear in a parsed GraphQL document.
  @type t ::
          Blueprint.Document.Field.t()
          | Blueprint.Document.Fragment.t()
          | Blueprint.Document.Operation.t()
          | Blueprint.Document.VariableDefinition.t()

  # Nodes that can appear inside a selection set.
  @type selection_t ::
          Blueprint.Document.Field.t()
          | Blueprint.Document.Fragment.Inline.t()
          | Blueprint.Document.Fragment.Spread.t()
end
defmodule Absinthe.Blueprint.Directive do
  @moduledoc false

  alias Absinthe.{Blueprint, Phase}

  @enforce_keys [:name]
  defstruct [
    :name,
    arguments: [],
    # When part of a Document
    source_location: nil,
    # Added by phases
    schema_node: nil,
    flags: %{},
    errors: []
  ]

  @type t :: %__MODULE__{
          name: String.t(),
          arguments: [Blueprint.Input.Argument.t()],
          source_location: nil | Blueprint.SourceLocation.t(),
          schema_node: nil | Absinthe.Type.Directive.t(),
          flags: Blueprint.flags_t(),
          errors: [Phase.Error.t()]
        }

  @spec expand(t, Blueprint.node_t()) :: {t, map}
  # No schema node: nothing to expand, the node is returned unchanged.
  # NOTE(review): this return does not match the `{t, map}` spec above —
  # confirm which is intended.
  def expand(%__MODULE__{schema_node: nil}, node) do
    node
  end

  # Invokes the directive's user-defined `expand` function with the
  # directive's argument values and the target node.
  def expand(%__MODULE__{schema_node: type} = directive, node) do
    args = Blueprint.Input.Argument.value_map(directive.arguments)
    Absinthe.Type.function(type, :expand).(args, node)
  end

  @doc """
  Determine the placement name for a given Blueprint node
  """
  @spec placement(Blueprint.node_t()) :: nil | atom
  # For operations the placement is the operation type itself (e.g. :query).
  def placement(%Blueprint.Document.Operation{type: type}), do: type
  def placement(%Blueprint.Document.Field{}), do: :field
  def placement(%Blueprint.Document.Fragment.Named{}), do: :fragment_definition
  def placement(%Blueprint.Document.Fragment.Spread{}), do: :fragment_spread
  def placement(%Blueprint.Document.Fragment.Inline{}), do: :inline_fragment
  # NOTE(review): unreachable — the first Operation clause above matches every
  # operation, so :operation_definition can never be returned.
  def placement(%Blueprint.Document.Operation{}), do: :operation_definition
  def placement(%Blueprint.Schema.SchemaDefinition{}), do: :schema
  def placement(%Blueprint.Schema.ScalarTypeDefinition{}), do: :scalar
  def placement(%Blueprint.Schema.ObjectTypeDefinition{}), do: :object
  def placement(%Blueprint.Schema.FieldDefinition{}), do: :field_definition
  def placement(%Blueprint.Schema.InterfaceTypeDefinition{}), do: :interface
  def placement(%Blueprint.Schema.UnionTypeDefinition{}), do: :union
  def placement(%Blueprint.Schema.EnumTypeDefinition{}), do: :enum
  def placement(%Blueprint.Schema.EnumValueDefinition{}), do: :enum_value
  def placement(%Blueprint.Schema.InputObjectTypeDefinition{}), do: :input_object
  def placement(%Blueprint.Schema.InputValueDefinition{placement: placement}), do: placement
end
defmodule Absinthe.Lexer do
  @moduledoc false

  # Tokenizer for GraphQL documents, built from NimbleParsec combinators.
  # The grammar comments throughout follow the GraphQL specification's
  # lexical grammar productions.
  import NimbleParsec

  # Codepoints
  @horizontal_tab 0x0009
  @newline 0x000A
  @carriage_return 0x000D
  @space 0x0020
  @unicode_final 0xFFFF
  @unicode_bom 0xFEFF

  # SourceCharacter :: /[\u0009\u000A\u000D\u0020-\uFFFF]/
  source_character =
    utf8_char([
      @horizontal_tab,
      @newline,
      @carriage_return,
      @space..@unicode_final
    ])

  # ## Ignored Tokens

  # UnicodeBOM :: "Byte Order Mark (U+FEFF)"
  unicode_bom = utf8_char([@unicode_bom])

  # WhiteSpace ::
  #   - "Horizontal Tab (U+0009)"
  #   - "Space (U+0020)"
  whitespace =
    ascii_char([
      @horizontal_tab,
      @space
    ])

  # LineTerminator ::
  #   - "New Line (U+000A)"
  #   - "Carriage Return (U+000D)" [ lookahead ! "New Line (U+000A)" ]
  #   - "Carriage Return (U+000D)" "New Line (U+000A)"
  line_terminator =
    choice([
      ascii_char([@newline]),
      ascii_char([@carriage_return])
      |> optional(ascii_char([@newline]))
    ])

  # Comment :: `#` CommentChar*
  # CommentChar :: SourceCharacter but not LineTerminator
  comment =
    string("#")
    |> repeat_while(source_character, {:not_line_terminator, []})

  # Comma :: ,
  comma = ascii_char([?,])

  # Ignored ::
  #   - UnicodeBOM
  #   - WhiteSpace
  #   - LineTerminator
  #   - Comment
  #   - Comma
  ignored =
    choice([
      unicode_bom,
      whitespace,
      line_terminator,
      comment,
      comma
    ])

  # ## Lexical Tokens
  #   - Punctuator
  #   - Name
  #   - IntValue
  #   - FloatValue
  #   - StringValue
  punctuator =
    choice([
      ascii_char([
        ?!,
        ?$,
        ?(,
        ?),
        ?:,
        ?=,
        ?@,
        ?[,
        ?],
        ?{,
        ?|,
        ?}
      ]),
      # `...` (spread) is the only multi-character punctuator.
      times(ascii_char([?.]), 3)
    ])
    |> traverse({:atom_token, []})

  # Names and GraphQL keywords share one production; the traverse step
  # decides which token to emit (see @reserved_words below).
  name_or_reserved_word =
    ascii_char([?_, ?A..?Z, ?a..?z])
    |> repeat(ascii_char([?_, ?0..?9, ?A..?Z, ?a..?z]))
    |> traverse({:name_or_reserved_word_token, []})

  # NegativeSign :: -
  negative_sign = ascii_char([?-])

  # Digit :: one of 0 1 2 3 4 5 6 7 8 9
  digit = ascii_char([?0..?9])

  # NonZeroDigit :: Digit but not `0`
  non_zero_digit = ascii_char([?1..?9])

  # IntegerPart ::
  #   - NegativeSign? 0
  #   - NegativeSign? NonZeroDigit Digit*
  integer_part =
    optional(negative_sign)
    |> choice([
      ascii_char([?0]),
      non_zero_digit |> repeat(digit)
    ])

  # IntValue :: IntegerPart
  int_value =
    empty()
    |> concat(integer_part)
    |> traverse({:labeled_token, [:int_value]})

  # FractionalPart :: . Digit+
  fractional_part =
    ascii_char([?.])
    |> times(digit, min: 1)

  # ExponentIndicator :: one of `e` `E`
  exponent_indicator = ascii_char([?e, ?E])

  # Sign :: one of + -
  sign = ascii_char([?+, ?-])

  # ExponentPart :: ExponentIndicator Sign? Digit+
  exponent_part =
    exponent_indicator
    |> optional(sign)
    |> times(digit, min: 1)

  # FloatValue ::
  #   - IntegerPart FractionalPart
  #   - IntegerPart ExponentPart
  #   - IntegerPart FractionalPart ExponentPart
  float_value =
    choice([
      integer_part |> concat(fractional_part) |> concat(exponent_part),
      # Exponent with no fractional part: synthesize a "0." mantissa so the
      # token still reads as a float.
      integer_part |> traverse({:fill_mantissa, []}) |> concat(exponent_part),
      integer_part |> concat(fractional_part)
    ])
    |> traverse({:labeled_token, [:float_value]})

  # EscapedUnicode :: /[0-9A-Fa-f]{4}/
  escaped_unicode =
    times(ascii_char([?0..?9, ?A..?F, ?a..?f]), 4)
    |> traverse({:unescape_unicode, []})

  # EscapedCharacter :: one of `"` \ `/` b f n r t
  escaped_character =
    choice([
      ascii_char([?"]),
      ascii_char([?\\]),
      ascii_char([?/]),
      ascii_char([?b]) |> replace(?\b),
      ascii_char([?f]) |> replace(?\f),
      ascii_char([?n]) |> replace(?\n),
      ascii_char([?r]) |> replace(?\r),
      ascii_char([?t]) |> replace(?\t)
    ])

  # StringCharacter ::
  #   - SourceCharacter but not `"` or \ or LineTerminator
  #   - \u EscapedUnicode
  #   - \ EscapedCharacter
  string_character =
    choice([
      ignore(string(~S(\u))) |> concat(escaped_unicode),
      ignore(ascii_char([?\\])) |> concat(escaped_character),
      source_character
    ])

  # BlockStringCharacter ::
  #   - SourceCharacter but not `"""` or `\"""`
  #   - `\"""`
  # Note: Block string values are interpreted to exclude blank initial and trailing
  # lines and uniform indentation with {BlockStringValue()}.
  block_string_character =
    choice([
      string(~S(\""")) |> replace(~s(""")),
      source_character
    ])

  # StringValue ::
  #   - `"` StringCharacter* `"`
  #   - `"""` BlockStringCharacter* `"""`
  # TODO: Use block_string_character
  string_value =
    ascii_char([?"])
    |> repeat_while(string_character, {:not_end_of_quote, []})
    |> ascii_char([?"])
    |> traverse({:labeled_token, [:string_value]})

  block_string_value =
    ignore(string(~S(""")) |> traverse({:mark_block_string_start, []}))
    |> repeat_while(block_string_character, {:not_end_of_block_quote, []})
    |> ignore(string(~S(""")))
    |> traverse({:block_string_value_token, []})

  # BooleanValue : one of `true` `false`
  boolean_value =
    choice([
      string("true"),
      string("false")
    ])
    |> traverse({:boolean_value_token, []})

  # repeat_while guard: stop consuming string characters at the closing quote
  # (or at a line terminator, via not_line_terminator/4).
  defp not_end_of_quote(<<?", _::binary>>, context, _, _) do
    {:halt, context}
  end

  defp not_end_of_quote(rest, context, current_line, current_offset) do
    not_line_terminator(rest, context, current_line, current_offset)
  end

  # repeat_while guard: stop consuming block-string characters at `"""`.
  defp not_end_of_block_quote(<<?", ?", ?", _::binary>>, context, _, _) do
    {:halt, context}
  end

  defp not_end_of_block_quote(_, context, _, _) do
    {:cont, context}
  end

  # Tokenizes a GraphQL source string.
  #
  # Returns `{:ok, tokens}` on success, or `{:error, rest, {line, column}}`
  # pointing at the first input that could not be tokenized.
  def tokenize(input) do
    case do_tokenize(input) do
      {:ok, tokens, "", _, _, _} ->
        {:ok, tokens}

      {:ok, _, rest, _, {line, line_offset}, byte_offset} ->
        column = byte_offset - line_offset + 1
        {:error, rest, {line, column}}

      other ->
        other
    end
  end

  defparsec(
    :do_tokenize,
    repeat(
      choice([
        ignore(ignored),
        comment,
        punctuator,
        block_string_value,
        string_value,
        float_value,
        int_value,
        boolean_value,
        name_or_reserved_word
      ])
    )
  )

  # Prepends "0." to the captured (reversed) digits — see float_value above.
  defp fill_mantissa(_rest, raw, context, _, _), do: {'0.' ++ raw, context}

  # Converts the four captured hex digits of a \uXXXX escape into the
  # corresponding UTF-8 binary.
  defp unescape_unicode(_rest, content, context, _loc, _) do
    code = content |> Enum.reverse()
    value = :httpd_util.hexlist_to_integer(code)
    binary = :unicode.characters_to_binary([value])
    {[binary], context}
  end

  @reserved_words ~w(
    directive
    enum
    extend
    fragment
    implements
    input
    interface
    mutation
    null
    on
    ON
    query
    scalar
    schema
    subscription
    type
    union
  ) |> Enum.map(&String.to_charlist/1)

  defp name_or_reserved_word_token(rest, chars, context, loc, byte_offset) do
    # NimbleParsec accumulates captured chars in reverse order.
    value = chars |> Enum.reverse()
    do_name_or_reserved_word_token(rest, value, context, loc, byte_offset)
  end

  # Reserved words become their own token atoms; anything else is a :name.
  defp do_name_or_reserved_word_token(_rest, value, context, loc, byte_offset)
       when value in @reserved_words do
    token_name = value |> List.to_atom()
    {[{token_name, line_and_column(loc, byte_offset, length(value))}], context}
  end

  defp do_name_or_reserved_word_token(_rest, value, context, loc, byte_offset) do
    {[{:name, line_and_column(loc, byte_offset, length(value)), value}], context}
  end

  defp boolean_value_token(_rest, [token_string], context, loc, byte_offset) do
    value = token_string |> String.to_charlist()
    {[{:boolean_value, line_and_column(loc, byte_offset, length(value)), value}], context}
  end

  # Generic traverse step: emits {token_name, location, chars} tuples.
  defp labeled_token(_rest, chars, context, loc, byte_offset, token_name) do
    value = chars |> Enum.reverse()
    {[{token_name, line_and_column(loc, byte_offset, length(value)), value}], context}
  end

  # Records the opening `"""` location in the context so the emitted token
  # can point at the start of the block string rather than its end.
  defp mark_block_string_start(_rest, chars, context, loc, byte_offset) do
    {[chars], Map.put(context, :token_location, line_and_column(loc, byte_offset, 3))}
  end

  defp block_string_value_token(_rest, chars, context, _loc, _byte_offset) do
    value = '"""' ++ (chars |> Enum.reverse()) ++ '"""'
    {[{:block_string_value, context.token_location, value}], Map.delete(context, :token_location)}
  end

  # Traverse step for punctuators: the token atom IS the punctuator text.
  defp atom_token(_rest, chars, context, loc, byte_offset) do
    value = chars |> Enum.reverse()
    token_atom = value |> List.to_atom()
    {[{token_atom, line_and_column(loc, byte_offset, length(value))}], context}
  end

  # Translates NimbleParsec's line/offset bookkeeping into a {line, column}
  # pair for the start of a token of the given length.
  def line_and_column({line, line_offset}, byte_offset, column_correction) do
    column = byte_offset - line_offset - column_correction + 1
    {line, column}
  end

  # repeat_while guard: halt at either form of line terminator.
  defp not_line_terminator(<<?\n, _::binary>>, context, _, _), do: {:halt, context}
  defp not_line_terminator(<<?\r, _::binary>>, context, _, _), do: {:halt, context}
  defp not_line_terminator(_, context, _, _), do: {:cont, context}
end
defmodule Absinthe.Plugin do
  @moduledoc """
  Plugin Behaviour

  Plugins are an additional set of callbacks that can be used by module based
  middleware to run actions before and after resolution, as well as add additional
  phases to run after resolution
  """

  # NOTE(review): the callback specs below reference `Document.Execution.t()`
  # with no alias in this module — confirm which module is intended.
  @type t :: module

  @doc """
  callback to setup the resolution accumulator prior to resolution.

  NOTE: This function is given the full accumulator. Namespacing is suggested to
  avoid conflicts.
  """
  @callback before_resolution(execution :: Document.Execution.t()) :: Document.Execution.t()

  @doc """
  callback to do something with the resolution accumulator after
  resolution.

  NOTE: This function is given the full accumulator. Namespacing is suggested to
  avoid conflicts.
  """
  @callback after_resolution(execution :: Document.Execution.t()) :: Document.Execution.t()

  @doc """
  callback used to specify additional phases to run.

  Plugins may require additional resolution phases to be run. This function should
  use values set in the resolution accumulator to determine
  whether or not additional phases are required.

  NOTE: This function is given the whole pipeline to be inserted after the current
  phase completes.
  """
  @callback pipeline(next_pipeline :: Absinthe.Pipeline.t(), execution :: Document.Execution.t()) ::
              Absinthe.Pipeline.t()

  @doc """
  Returns the list of default plugins.
  """
  def defaults() do
    [Absinthe.Middleware.Batch, Absinthe.Middleware.Async]
  end

  @doc false
  # Folds each plugin's requested extra phases into one pipeline, removing
  # consecutive duplicates and flattening nested phase lists.
  def pipeline(plugins, exec) do
    Enum.reduce(plugins, [], fn plugin, pipeline ->
      plugin.pipeline(pipeline, exec)
    end)
    |> Enum.dedup()
    |> List.flatten()
  end
end
# .formatter.exs for the Absinthe project.
#
# Absinthe's schema-notation macros read better without parentheses; listing
# them under `locals_without_parens` stops `mix format` from adding parens,
# and the `:export` key lets dependent projects inherit the same setting.
#
# Fix: removed the trailing commas before the closing brackets — Elixir does
# not allow trailing commas in lists, so the original was a syntax error.
locals_without_parens = [
  mutation: 2,
  query: 2,
  subscription: 2,
  arg: 2,
  arg: 3,
  complexity: 1,
  config: 1,
  deprecate: 1,
  description: 1,
  directive: 3,
  enum: 2,
  enum: 3,
  expand: 1,
  field: 2,
  field: 3,
  field: 4,
  import_fields: 2,
  import_fields: 1,
  import_types: 1,
  input_object: 3,
  instruction: 1,
  interface: 1,
  interface: 3,
  interfaces: 1,
  is_type_of: 1,
  meta: 1,
  meta: 2,
  middleware: 2,
  middleware: 1,
  object: 3,
  on: 1,
  parse: 1,
  record_object!: 4,
  recordable!: 4,
  resolve: 1,
  resolve_type: 1,
  scalar: 2,
  scalar: 3,
  serialize: 1,
  trigger: 2,
  types: 1,
  union: 3,
  value: 1,
  value: 2
]

[
  inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"],
  locals_without_parens: locals_without_parens,
  export: [
    locals_without_parens: locals_without_parens
  ]
]
# This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
# Fix: removed the trailing comma after the last entry of `checks:` — Elixir
# does not allow trailing commas in lists, so the original was a syntax error.
%{
  #
  # You can have as many configs as you like in the `configs:` field.
  configs: [
    %{
      #
      # Run any exec using `mix credo -C <name>`. If no exec name is given
      # "default" is used.
      name: "default",
      #
      # These are the files included in the analysis:
      files: %{
        #
        # You can give explicit globs or simply directories.
        # In the latter case `**/*.{ex,exs}` will be used.
        included: ["lib/", "src/", "web/", "apps/"],
        excluded: [~r"/_build/", ~r"/deps/"]
      },
      #
      # If you create your own checks, you must specify the source files for
      # them here, so they can be loaded by Credo before running the analysis.
      requires: [],
      #
      # Credo automatically checks for updates, like e.g. Hex does.
      # You can disable this behaviour below:
      check_for_updates: true,
      #
      # If you want to enforce a style guide and need a more traditional linting
      # experience, you can change `strict` to `true` below:
      strict: false,
      #
      # If you want to use uncolored output by default, you can change `color`
      # to `false` below:
      color: true,
      #
      # You can customize the parameters of any check by adding a second element
      # to the tuple.
      #
      # To disable a check put `false` as second element:
      #
      #     {Credo.Check.Design.DuplicatedCode, false}
      #
      checks: [
        {Credo.Check.Consistency.ExceptionNames},
        {Credo.Check.Consistency.LineEndings},
        {Credo.Check.Consistency.ParameterPatternMatching},
        {Credo.Check.Consistency.SpaceAroundOperators},
        {Credo.Check.Consistency.SpaceInParentheses},
        {Credo.Check.Consistency.TabsOrSpaces},
        # For some checks, like AliasUsage, you can only customize the priority
        # Priority values are: `low, normal, high, higher`
        {Credo.Check.Design.AliasUsage, priority: :low},
        # For others you can set parameters
        # If you don't want the `setup` and `test` macro calls in ExUnit tests
        # or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
        # set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
        {Credo.Check.Design.DuplicatedCode, excluded_macros: []},
        # You can also customize the exit_status of each check.
        # If you don't want TODO comments to cause `mix credo` to fail, just
        # set this value to 0 (zero).
        {Credo.Check.Design.TagTODO, exit_status: 2},
        {Credo.Check.Design.TagFIXME},
        {Credo.Check.Readability.FunctionNames},
        {Credo.Check.Readability.LargeNumbers},
        {Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 120},
        {Credo.Check.Readability.ModuleAttributeNames},
        {Credo.Check.Readability.ModuleDoc},
        {Credo.Check.Readability.ModuleNames},
        {Credo.Check.Readability.ParenthesesOnZeroArityDefs},
        {Credo.Check.Readability.ParenthesesInCondition},
        {Credo.Check.Readability.PredicateFunctionNames},
        {Credo.Check.Readability.PreferImplicitTry},
        {Credo.Check.Readability.RedundantBlankLines},
        {Credo.Check.Readability.StringSigils},
        {Credo.Check.Readability.TrailingBlankLine},
        {Credo.Check.Readability.TrailingWhiteSpace},
        {Credo.Check.Readability.VariableNames},
        {Credo.Check.Readability.Semicolons},
        {Credo.Check.Readability.SpaceAfterCommas},
        {Credo.Check.Refactor.DoubleBooleanNegation},
        {Credo.Check.Refactor.CondStatements},
        {Credo.Check.Refactor.CyclomaticComplexity},
        {Credo.Check.Refactor.FunctionArity},
        {Credo.Check.Refactor.LongQuoteBlocks},
        {Credo.Check.Refactor.MatchInCondition},
        {Credo.Check.Refactor.NegatedConditionsInUnless},
        {Credo.Check.Refactor.NegatedConditionsWithElse},
        {Credo.Check.Refactor.Nesting},
        {Credo.Check.Refactor.PipeChainStart},
        {Credo.Check.Refactor.UnlessWithElse},
        {Credo.Check.Warning.BoolOperationOnSameValues},
        {Credo.Check.Warning.IExPry},
        {Credo.Check.Warning.IoInspect},
        {Credo.Check.Warning.LazyLogging},
        {Credo.Check.Warning.OperationOnSameValues},
        {Credo.Check.Warning.OperationWithConstantResult},
        {Credo.Check.Warning.UnusedEnumOperation},
        {Credo.Check.Warning.UnusedFileOperation},
        {Credo.Check.Warning.UnusedKeywordOperation},
        {Credo.Check.Warning.UnusedListOperation},
        {Credo.Check.Warning.UnusedPathOperation},
        {Credo.Check.Warning.UnusedRegexOperation},
        {Credo.Check.Warning.UnusedStringOperation},
        {Credo.Check.Warning.UnusedTupleOperation},
        # Controversial and experimental checks (opt-in, just remove `, false`)
        #
        {Credo.Check.Refactor.ABCSize, false},
        {Credo.Check.Refactor.AppendSingleItem, false},
        {Credo.Check.Refactor.VariableRebinding, false},
        {Credo.Check.Warning.MapGetUnsafePass, false},
        {Credo.Check.Consistency.MultiAliasImportRequireUse, false}
        # Custom checks can be created using `mix credo.gen.check`.
        #
      ]
    }
  ]
}
defmodule Guardian.Mixfile do
  @moduledoc false
  use Mix.Project

  @version "1.1.1"
  @url "https://github.com/ueberauth/guardian"
  @maintainers [
    "Daniel Neighman",
    "Sonny Scroggin",
    "Sean Callan"
  ]

  # Mix project definition: package metadata, docs, dialyzer, and coverage
  # configuration for the Guardian library.
  def project do
    [
      name: "Guardian",
      app: :guardian,
      version: @version,
      elixir: "~> 1.4 or ~> 1.5",
      elixirc_paths: elixirc_paths(Mix.env()),
      package: package(),
      source_url: @url,
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      maintainers: @maintainers,
      description: "Elixir Authentication framework",
      homepage_url: @url,
      docs: docs(),
      deps: deps(),
      # :phoenix is an optional dependency, so exclude it from xref checks.
      xref: [exclude: [:phoenix]],
      dialyzer: [
        plt_add_deps: :transitive,
        plt_add_apps: [:mix],
        flags: [:race_conditions, :no_opaque]
      ],
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.html": :test
      ]
    ]
  end

  # Compile the test support modules only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # OTP application settings; Guardian needs :crypto and :logger at runtime.
  def application do
    [extra_applications: [:crypto, :logger]]
  end

  # ExDoc configuration: guide extras, module groupings, and output formats.
  def docs do
    [
      source_ref: "v#{@version}",
      main: "introduction-overview",
      extra_section: "guides",
      assets: "guides/assets",
      formatters: ["html", "epub"],
      groups_for_modules: groups_for_modules(),
      extras: extras()
    ]
  end

  # Markdown guides shipped with the docs; each entry maps a source file to
  # its ExDoc group, output filename, and (optionally) a display title.
  defp extras do
    [
      "guides/introduction/overview.md": [
        group: "Introduction",
        filename: "introduction-overview"
      ],
      "guides/introduction/installation.md": [
        group: "Introduction",
        filename: "introduction-installation"
      ],
      "guides/introduction/implementation.md": [
        group: "Introduction",
        filename: "introduction-implementation",
        title: "Implementation Modules"
      ],
      "guides/introduction/community.md": [
        group: "Introduction",
        filename: "introduction-community"
      ],
      "guides/tutorial/start-tutorial.md": [
        group: "Tutorial",
        filename: "tutorial-start",
        title: "Start"
      ],
      "guides/tokens/start-tokens.md": [group: "Tokens", filename: "tokens-start", title: "Start"],
      "guides/tokens/jwt/start.md": [
        group: "JWT Tokens",
        filename: "tokens-jwt-start",
        title: "Start"
      ],
      "guides/plug/start-plug.md": [group: "Plug", filename: "plug-start", title: "Start"],
      "guides/plug/pipelines.md": [group: "Plug", filename: "plug-pipelines", title: "Pipelines"],
      "guides/phoenix/start-phoenix.md": [
        group: "Phoenix",
        filename: "phoenix-start",
        title: "Start"
      ],
      "guides/permissions/start-permissions.md": [
        group: "Permissions",
        filename: "permissions-start",
        title: "Start"
      ],
      "guides/upgrading/v1.0.md": [group: "Upgrade Guides", filename: "upgrading-v1.0"]
    ]
  end

  # Sidebar module groupings for ExDoc.
  defp groups_for_modules do
    # Ungrouped:
    # - Guardian
    [
      Tokens: [
        Guardian.Token,
        Guardian.Token.Verify,
        Guardian.Token.Jwt,
        Guardian.Token.Jwt.Verify
      ],
      Plugs: [
        Guardian.Plug,
        Guardian.Plug.Pipeline,
        Guardian.Plug.EnsureAuthenticated,
        Guardian.Plug.EnsureNotAuthenticated,
        Guardian.Plug.LoadResource,
        Guardian.Plug.VerifySession,
        Guardian.Plug.VerifyHeader,
        Guardian.Plug.VerifyCookie,
        Guardian.Plug.Keys
      ],
      Phoenix: [
        Guardian.Phoenix.Socket
      ],
      Permissions: [
        Guardian.Permissions.Bitwise
      ]
    ]
  end

  # Runtime, optional, and dev/test tooling dependencies.
  defp deps do
    [
      {:jose, "~> 1.8"},
      {:poison, "~> 2.2 or ~> 3.0"},
      # Optional dependencies
      {:phoenix, "~> 1.0 or ~> 1.2 or ~> 1.3", optional: true},
      {:plug, "~> 1.3.3 or ~> 1.4", optional: true},
      # Tools
      {:dialyxir, ">= 0.0.0", only: [:dev], runtime: false},
      {:credo, ">= 0.0.0", only: [:dev, :test], runtime: false},
      {:excoveralls, ">= 0.0.0", only: [:test], runtime: false},
      {:ex_doc, ">= 0.0.0", only: [:dev], runtime: false},
      {:inch_ex, ">= 0.0.0", only: [:dev], runtime: false}
    ]
  end

  # Hex package metadata (maintainers, license, links, published files).
  defp package do
    [
      maintainers: @maintainers,
      licenses: ["MIT"],
      links: %{github: @url},
      files: ~w(lib) ++ ~w(CHANGELOG.md LICENSE mix.exs README.md)
    ]
  end
end
defmodule GuardianTest do
@moduledoc false
import Guardian.Support.Utils, only: [gather_function_calls: 0]
use ExUnit.Case, async: true
# Make the Guardian implementation under test available to every test via
# the context as `ctx.impl`.
setup do
  {:ok, %{impl: GuardianTest.Impl}}
end
defmodule Impl do
  @moduledoc false

  # Test double for a Guardian implementation module. Every callback records
  # its invocation through `send_function_call/1` so tests can assert on the
  # exact call sequence, and most callbacks can be forced to fail by passing
  # a `:fail_<callback>` option whose value becomes the error reason.
  use Guardian,
    otp_app: :guardian,
    token_module: Guardian.Support.TokenModule

  import Guardian.Support.Utils, only: [send_function_call: 1]

  def subject_for_token(%{id: id} = resource, claims) do
    send_function_call({__MODULE__, :subject_for_token, [resource, claims]})
    {:ok, id}
  end

  def subject_for_token(%{"id" => id} = resource, claims) do
    send_function_call({__MODULE__, :subject_for_token, [resource, claims]})
    {:ok, id}
  end

  # NOTE(review): this records the call tagged as :subject_for_token rather
  # than :resource_from_claims; existing assertions may depend on that exact
  # tag — confirm before "fixing".
  def resource_from_claims(%{"sub" => id} = claims) do
    send_function_call({__MODULE__, :subject_for_token, [claims]})
    {:ok, %{id: id}}
  end

  def build_claims(claims, resource, options) do
    send_function_call({__MODULE__, :build_claims, [claims, resource, options]})
    result_or_failure(options, :fail_build_claims, {:ok, claims})
  end

  def after_encode_and_sign(resource, claims, token, options) do
    send_function_call({__MODULE__, :after_encode_and_sign, [resource, claims, token, options]})
    result_or_failure(options, :fail_after_encode_and_sign, {:ok, token})
  end

  def after_sign_in(conn, location) do
    send_function_call({__MODULE__, :after_sign_in, [:conn, location]})
    conn
  end

  def before_sign_out(conn, location) do
    send_function_call({__MODULE__, :before_sign_out, [:conn, location]})
    conn
  end

  def verify_claims(claims, options) do
    send_function_call({__MODULE__, :verify_claims, [claims, options]})
    result_or_failure(options, :fail_mod_verify_claims, {:ok, claims})
  end

  def on_verify(claims, token, options) do
    send_function_call({__MODULE__, :on_verify, [claims, token, options]})
    result_or_failure(options, :fail_on_verify, {:ok, claims})
  end

  def on_revoke(claims, token, options) do
    send_function_call({__MODULE__, :on_revoke, [claims, token, options]})
    result_or_failure(options, :fail_on_revoke, {:ok, claims})
  end

  def on_refresh(old_stuff, new_stuff, options) do
    send_function_call({__MODULE__, :on_refresh, [old_stuff, new_stuff, options]})
    result_or_failure(options, :fail_on_refresh, {:ok, old_stuff, new_stuff})
  end

  def on_exchange(old_stuff, new_stuff, options) do
    send_function_call({__MODULE__, :on_exchange, [old_stuff, new_stuff, options]})
    result_or_failure(options, :fail_on_exchange, {:ok, old_stuff, new_stuff})
  end

  # Returns {:error, reason} when `key` is set to a truthy value in
  # `options`; otherwise returns the given success tuple. Mirrors the
  # original `if Keyword.get(...)` pattern exactly (a `false` value also
  # yields success).
  defp result_or_failure(options, key, success) do
    reason = Keyword.get(options, key)
    if reason, do: {:error, reason}, else: success
  end
end
describe "encode_and_sign" do
@resource %{id: "bobby"}
test "the impl has access to it's config", ctx do
assert ctx.impl.config(:token_module) == Guardian.Support.TokenModule
end
test "encode_and_sign with only a resource", ctx do
assert {:ok, token, full_claims} = Guardian.encode_and_sign(ctx.impl, @resource, %{}, [])
assert full_claims == %{"sub" => "bobby", "typ" => "access"}
expected = [
{ctx.impl, :subject_for_token, [%{id: "bobby"}, %{}]},
{Guardian.Support.TokenModule, :build_claims, [ctx.impl, @resource, "bobby", %{}, []]},
{ctx.impl, :build_claims, [full_claims, @resource, []]},
{Guardian.Support.TokenModule, :create_token, [ctx.impl, full_claims, []]},
{ctx.impl, :after_encode_and_sign, [@resource, full_claims, token, []]}
]
assert gather_function_calls() == expected
end
test "with custom claims", ctx do
claims = %{"some" => "claim"}
assert {:ok, token, full_claims} = Guardian.encode_and_sign(ctx.impl, @resource, claims, [])
assert full_claims == %{"sub" => "bobby", "some" => "claim", "typ" => "access"}
expected = [
{ctx.impl, :subject_for_token, [@resource, claims]},
{Guardian.Support.TokenModule, :build_claims, [ctx.impl, @resource, "bobby", claims, []]},
{ctx.impl, :build_claims, [full_claims, @resource, []]},
{Guardian.Support.TokenModule, :create_token, [ctx.impl, full_claims, []]},
{ctx.impl, :after_encode_and_sign, [@resource, full_claims, token, []]}
]
assert gather_function_calls() == expected
# (GitHub gist page footer removed — the original file is truncated at this point)