Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add dialyxir to CI #1248

Merged
merged 8 commits into from
Jun 19, 2023
6 changes: 5 additions & 1 deletion .dialyzer_ignore.exs
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
[
{":0:unknown_function Function :persistent_term.get/1 does not exist."},
{":0:unknown_function Function :persistent_term.put/2 does not exist."}
{":0:unknown_function Function :persistent_term.put/2 does not exist."},
{"lib/absinthe/middleware/async.ex", :unknown_function, 117},
{"lib/absinthe/middleware/batch.ex", :unknown_function, 213},
{"lib/absinthe/utils/render.ex", :improper_list_constr, 51},
Comment on lines +4 to +6
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let me know if you would prefer a broader "no-line" entry.

{":0:unknown_function Function OpentelemetryProcessPropagator.Task.async/1 does not exist."}
]
14 changes: 14 additions & 0 deletions .github/workflows/elixir.yml
Original file line number Diff line number Diff line change
Expand Up @@ -64,3 +64,17 @@ jobs:
mix test
env:
SCHEMA_PROVIDER: persistent_term

- name: Cache/uncache PLTs
uses: actions/cache@v3
with:
path: |
priv/plts
key: "${{ runner.os }}-\
erlang-${{ matrix.otp }}-\
elixir-${{ matrix.elixir }}-\
${{ hashFiles('mix.lock') }}"

- name: Run Dialyzer
run: mix dialyzer

2 changes: 1 addition & 1 deletion lib/absinthe/blueprint/source_location.ex
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ defmodule Absinthe.Blueprint.SourceLocation do
@doc """
Generate a `SourceLocation.t()` given line and column numbers
"""
@spec at(line :: pos_integer, column :: pos_integer) :: t
@spec at(line :: pos_integer, column :: non_neg_integer) :: t
def at(line, column) do
%__MODULE__{line: line, column: column}
end
Expand Down
20 changes: 13 additions & 7 deletions lib/absinthe/lexer.ex
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ defmodule Absinthe.Lexer do
@space 0x0020
@unicode_bom 0xFEFF

@stopped_at_token_limit ":stopped_at_token_limit"

Comment on lines +11 to +12
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Because nimble_parsec expects error reasons to be String.t().

Another solution would be to open a pull request there so the error reason can be term(), for example.

# SourceCharacter :: /[\u0009\u000A\u000D\u0020-\uFFFF]/

any_unicode = utf8_char([])
Expand Down Expand Up @@ -228,14 +230,16 @@ defmodule Absinthe.Lexer do
end

@spec tokenize(binary(), Keyword.t()) ::
{:ok, [any()]} | {:error, binary(), {integer(), non_neg_integer()}}
{:ok, [any()]}
| {:error, binary(), {integer(), non_neg_integer()}}
| {:error, :exceeded_token_limit}
def tokenize(input, options \\ []) do
lines = String.split(input, ~r/\r?\n/)

tokenize_opts = [context: %{token_limit: Keyword.get(options, :token_limit, :infinity)}]

case do_tokenize(input, tokenize_opts) do
{:error, :stopped_at_token_limit, _, _, _, _} ->
{:error, @stopped_at_token_limit, _, _, _, _} ->
{:error, :exceeded_token_limit}

{:ok, tokens, "", _, _, _} ->
Expand Down Expand Up @@ -341,6 +345,8 @@ defmodule Absinthe.Lexer do

@spec do_tokenize(binary()) ::
{:ok, [any()], binary(), map(), {pos_integer(), pos_integer()}, pos_integer()}
| {:error, String.t(), String.t(), map(), {non_neg_integer(), non_neg_integer()},
non_neg_integer()}
defparsec(
:do_tokenize,
repeat(
Expand Down Expand Up @@ -399,7 +405,7 @@ defmodule Absinthe.Lexer do
_
)
when count >= limit do
{:error, :stopped_at_token_limit}
{:error, @stopped_at_token_limit}
end

defp boolean_value_or_name_or_reserved_word(rest, chars, context, loc, byte_offset) do
Expand All @@ -425,7 +431,7 @@ defmodule Absinthe.Lexer do

defp labeled_token(_, _, %{token_count: count, token_limit: limit} = _context, _, _, _)
when count >= limit,
do: {:error, :stopped_at_token_limit}
do: {:error, @stopped_at_token_limit}

defp labeled_token(rest, chars, context, loc, byte_offset, token_name) do
context = Map.update(context, :token_count, 1, &(&1 + 1))
Expand All @@ -443,7 +449,7 @@ defmodule Absinthe.Lexer do

defp block_string_value_token(_, _, %{token_count: count, token_limit: limit} = _context, _, _)
when count >= limit,
do: {:error, :stopped_at_token_limit}
do: {:error, @stopped_at_token_limit}

defp block_string_value_token(rest, chars, context, _loc, _byte_offset) do
context = Map.update(context, :token_count, 1, &(&1 + 1))
Expand All @@ -455,7 +461,7 @@ defmodule Absinthe.Lexer do

defp string_value_token(_, _, %{token_count: count, token_limit: limit} = _context, _, _)
when count >= limit,
do: {:error, :stopped_at_token_limit}
do: {:error, @stopped_at_token_limit}

defp string_value_token(rest, chars, context, _loc, _byte_offset) do
context = Map.update(context, :token_count, 1, &(&1 + 1))
Expand All @@ -465,7 +471,7 @@ defmodule Absinthe.Lexer do

defp atom_token(_, _, %{token_count: count, token_limit: limit} = _context, _, _)
when count >= limit do
{:error, :stopped_at_token_limit}
{:error, @stopped_at_token_limit}
end

defp atom_token(rest, chars, context, loc, byte_offset) do
Expand Down
2 changes: 1 addition & 1 deletion lib/absinthe/phase/parse.ex
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ defmodule Absinthe.Phase.Parse do
# This is because Dialyzer is telling us tokenizing can never fail,
# but we know it's possible.
@dialyzer {:no_match, parse: 2}
@spec parse(binary | Language.Source.t(), Map.t()) ::
@spec parse(binary | Language.Source.t(), Keyword.t()) ::
{:ok, Language.Document.t()} | {:error, tuple}
defp parse(input, options) when is_binary(input) do
parse(%Language.Source{body: input}, options)
Expand Down
7 changes: 5 additions & 2 deletions lib/absinthe/pipeline.ex
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,9 @@ defmodule Absinthe.Pipeline do

@type t :: [phase_config_t | [phase_config_t]]

@spec run(data_t, t) :: {:ok, data_t, [Phase.t()]} | {:error, String.t(), [Phase.t()]}
@spec run(data_t, t) ::
{:ok, data_t, [Phase.t()]}
| {:error, String.t() | {:http_method, String.t()}, [Phase.t()]}
def run(input, pipeline) do
pipeline
|> List.flatten()
Expand Down Expand Up @@ -392,7 +394,8 @@ defmodule Absinthe.Pipeline do
end

@spec run_phase(t, data_t, [Phase.t()]) ::
{:ok, data_t, [Phase.t()]} | {:error, String.t(), [Phase.t()]}
{:ok, data_t, [Phase.t()]}
| {:error, String.t() | {:http_method, String.t()}, [Phase.t()]}
def run_phase(pipeline, input, done \\ [])

def run_phase([], input, done) do
Expand Down
3 changes: 2 additions & 1 deletion mix.exs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,8 @@ defmodule Absinthe.Mixfile do
],
deps: deps(),
dialyzer: [
plt_core_path: "priv/plts",
plt_add_deps: :apps_direct,
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For lolspeed 😄

plt_file: {:no_warn, "priv/plts/project.plt"},
plt_add_apps: [:mix, :dataloader, :decimal, :ex_unit]
]
]
Expand Down
5 changes: 3 additions & 2 deletions mix.lock
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
%{
"benchee": {:hex, :benchee, "1.0.1", "66b211f9bfd84bd97e6d1beaddf8fc2312aaabe192f776e8931cb0c16f53a521", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}], "hexpm", "3ad58ae787e9c7c94dd7ceda3b587ec2c64604563e049b2a0e8baafae832addb"},
"benchee": {:hex, :benchee, "1.1.0", "f3a43817209a92a1fade36ef36b86e1052627fd8934a8b937ac9ab3a76c43062", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}], "hexpm", "7da57d545003165a012b587077f6ba90b89210fd88074ce3c60ce239eb5e6d93"},
"dataloader": {:hex, :dataloader, "1.0.8", "114294362db98a613f231589246aa5b0ce847412e8e75c4c94f31f204d272cbf", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "eaf3c2aa2bc9dbd2f1e960561d616b7f593396c4754185b75904f6d66c82a667"},
"decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"},
"deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"},
Expand All @@ -12,9 +12,10 @@
"makeup_elixir": {:hex, :makeup_elixir, "0.16.0", "f8c570a0d33f8039513fbccaf7108c5d750f47d8defd44088371191b76492b0b", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "28b2cbdc13960a46ae9a8858c4bebdec3c9a6d7b4b9e7f4ed1502f8159f338e7"},
"makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"},
"makeup_graphql": {:hex, :makeup_graphql, "0.1.2", "81e2939aab6d2b81d39ee5d9e13fae02599e9ca6e1152e0eeed737a98a5f96aa", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.1", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "3390ab04ba388d52a94bbe64ef62aa4d7923ceaffac43ec948f58f631440e8fb"},
"mix_test_watch": {:hex, :mix_test_watch, "1.0.2", "34900184cbbbc6b6ed616ed3a8ea9b791f9fd2088419352a6d3200525637f785", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "47ac558d8b06f684773972c6d04fcc15590abdb97aeb7666da19fcbfdc441a07"},
"mix_test_watch": {:hex, :mix_test_watch, "1.1.0", "330bb91c8ed271fe408c42d07e0773340a7938d8a0d281d57a14243eae9dc8c3", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "52b6b1c476cbb70fd899ca5394506482f12e5f6b0d6acff9df95c7f1e0812ec3"},
"nimble_parsec": {:hex, :nimble_parsec, "1.2.3", "244836e6e3f1200c7f30cb56733fd808744eca61fd182f731eac4af635cc6d0b", [:mix], [], "hexpm", "c8d789e39b9131acf7b99291e93dae60ab48ef14a7ee9d58c6964f59efb570b0"},
"opentelemetry_api": {:hex, :opentelemetry_api, "1.2.0", "454a35655b4c1924405ef1f3587f2c6f141bf73366b2c5e8a38dcc619b53eaa0", [:mix, :rebar3], [], "hexpm", "9e677c68243de0f70538798072e66e1fb1d4a2ca8888a6eb493c0a41e5480c35"},
"opentelemetry_process_propagator": {:hex, :opentelemetry_process_propagator, "0.2.1", "20ac37648faf7175cade16fda8d58e6f1ff1b7f2a50a8ef9d70a032c41aba315", [:mix, :rebar3], [{:opentelemetry_api, "~> 1.0", [hex: :opentelemetry_api, repo: "hexpm", optional: false]}], "hexpm", "f317237e39636d4f6140afa5d419e85ed3dc9e9a57072e7cd442df42af7b8aac"},
"statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"},
"telemetry": {:hex, :telemetry, "0.4.3", "a06428a514bdbc63293cd9a6263aad00ddeb66f608163bdec7c8995784080818", [:rebar3], [], "hexpm", "eb72b8365ffda5bed68a620d1da88525e326cb82a75ee61354fc24b844768041"},
}