diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6e58ef0..22596e8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,6 +16,7 @@
 - PostgreSQL adapter [#5](https://github.com/elixir-dbvisor/sql/pull/5).
 - TDS adapter [#5](https://github.com/elixir-dbvisor/sql/pull/5).
 - Improve SQL generation with 4-600x compared to Ecto [#7](https://github.com/elixir-dbvisor/sql/pull/7).
+- Ensure inspect follows the standard [representation](https://hexdocs.pm/elixir/Inspect.html#module-inspect-representation) [#4](https://github.com/elixir-dbvisor/sql/pull/4)

 ### Deprecation
 - token_to_sql/2 is deprecated in favor of SQL.Token behaviour token_to_string/2 [#5](https://github.com/elixir-dbvisor/sql/pull/5).
diff --git a/README.md b/README.md
index ca3b79f..f3f2a64 100644
--- a/README.md
+++ b/README.md
@@ -18,18 +18,20 @@ Brings an extensible SQL parser and sigil to Elixir, confidently write SQL with
 ```elixir
 iex(1)> email = "john@example.com"
 "john@example.com"
-iex(2)> select = ~SQL"select id, email"
-"select id, email"
-iex(3)> ~SQL[from users] |> ~SQL[where email = #{email}] |> select
-"select id, email from users where email = \"john@example.com\""
-iex(4)> sql = ~SQL[from users where email = #{email} select id, email]
-"select id, email from users where email = \"john@example.com\""
+iex(2)> ~SQL[from users] |> ~SQL[where email = {{email}}] |> ~SQL"select id, email"
+~SQL"""
+where email = {{email}} from users select id, email
+"""
+iex(4)> sql = ~SQL[from users where email = {{email}} select id, email]
+~SQL"""
+from users where email = {{email}} select id, email
+"""
 iex(5)> to_sql(sql)
-{"select id, email from users where email = $0", ["john@example.com"]}
+{"select id, email from users where email = ?", ["john@example.com"]}
 iex(6)> to_string(sql)
-"select id, email from users where email = $0"
+"select id, email from users where email = ?"
 iex(7)> inspect(sql)
-"select id, email from users where email = \"john@example.com\""
+"~SQL\"\"\"\nfrom users where email = {{email}} select id, email\n\"\"\""
 ```

 ### Leverage the Enumerable protocol in your repository
diff --git a/lib/lexer.ex b/lib/lexer.ex
index b8e0370..36c36b6 100644
--- a/lib/lexer.ex
+++ b/lib/lexer.ex
@@ -24,13 +24,13 @@ defmodule SQL.Lexer do
   def expected_delimiter(:backtick), do: :"`"
   def expected_delimiter(type) when type in ~w[var code braces]a, do: :"}"

-  def lex(binary, meta, params \\ 0, opts \\ [metadata: true]) do
-    case lex(binary, binary, [{:binding, []}, {:params, params}, {:meta, meta} | opts], 0, 0, nil, [], [], 0) do
+  def lex(binary, file, params \\ 0, opts \\ [metadata: true]) do
+    case lex(binary, binary, [{:binding, []}, {:params, params}, {:file, file} | opts], 0, 0, nil, [], [], 0) do
       {"", _binary, opts, line, column, nil = type, data, acc, _n} -> {:ok, opts, line, column, type, data, acc}
       {"", binary, _opts, end_line, end_column, type, _data, [{_, [line: line, column: column, file: file], _}|_], _n} when type in ~w[parens bracket double_quote quote backtick var code]a ->
-        raise TokenMissingError, file: "#{file}", snippet: binary, end_line: end_line, end_column: end_column, line: line, column: column, opening_delimiter: opening_delimiter(type), expected_delimiter: expected_delimiter(type)
+        raise TokenMissingError, file: file, snippet: binary, end_line: end_line, end_column: end_column, line: line, column: column, opening_delimiter: opening_delimiter(type), expected_delimiter: expected_delimiter(type)
       {"", _binary, opts, line, column, type, data, acc, _n} -> {:ok, opts, line, column, type, data, insert_node(node(ident(type, data), line, column, data, opts), acc)}
@@ -51,7 +51,7 @@ defmodule SQL.Lexer do
   def lex(<>, binary, opts, line, column, :comments, data, acc, n) do
     lex(rest, binary, opts, line, column+1, :comments, [data | [b]], acc, n)
   end
-  def lex(<>, binary, opts, line, column, _type, data, acc, n) do
+  def lex(<>, binary, opts, line, column, nil, data, acc, n) do
     lex(rest, binary, opts, line, column+2, :var, data, acc, n)
   end
   def lex(<>, binary, [_, _, _, {:format, true}] = opts, line, column, _type, data, acc, 0 = n), do: lex(rest, binary, opts, line, column+2, nil, [], insert_node(node(:binding, line, column, data, opts), acc), n)
@@ -266,7 +266,7 @@ defmodule SQL.Lexer do
   def type(_b, _type), do: :ident

   def meta(_line, _column, [_,_,_,{_,false}|_]), do: []
-  def meta(line, column, [_, _, {_, {_, _, file}} |_]), do: [line: line, column: column, file: file]
+  def meta(line, column, [_, _, {_, file} |_]), do: [line: line, column: column, file: file]

   def node(:binding = tag, line, column, [idx], [{:binding, false}, {:params, params}|_] = opts) do
     {tag, meta(line, column, opts), Enum.at(params, idx)}
diff --git a/lib/mix/tasks/sql.gen.parser.ex b/lib/mix/tasks/sql.gen.parser.ex
index 1f3e709..4364fb9 100644
--- a/lib/mix/tasks/sql.gen.parser.ex
+++ b/lib/mix/tasks/sql.gen.parser.ex
@@ -302,13 +302,13 @@ defmodule Mix.Tasks.Sql.Gen.Parser do
   def expected_delimiter(:backtick), do: :"`"
   def expected_delimiter(type) when type in ~w[var code braces]a, do: :"}"

-  def lex(binary, meta, params \\\\ 0, opts \\\\ [metadata: true]) do
-    case lex(binary, binary, [{:binding, []}, {:params, params}, {:meta, meta} | opts], 0, 0, nil, [], [], 0) do
+  def lex(binary, file, params \\\\ 0, opts \\\\ [metadata: true]) do
+    case lex(binary, binary, [{:binding, []}, {:params, params}, {:file, file} | opts], 0, 0, nil, [], [], 0) do
      {"", _binary, opts, line, column, nil = type, data, acc, _n} -> {:ok, opts, line, column, type, data, acc}
      {"", binary, _opts, end_line, end_column, type, _data, [{_, [line: line, column: column, file: file], _}|_], _n} when type in ~w[parens bracket double_quote quote backtick var code]a ->
-       raise TokenMissingError, file: "\#{file}", snippet: binary, end_line: end_line, end_column: end_column, line: line, column: column, opening_delimiter: opening_delimiter(type), expected_delimiter: expected_delimiter(type)
+       raise TokenMissingError, file: file, snippet: binary, end_line: end_line, end_column: end_column, line: line, column: column, opening_delimiter: opening_delimiter(type), expected_delimiter: expected_delimiter(type)
      {"", _binary, opts, line, column, type, data, acc, _n} -> {:ok, opts, line, column, type, data, insert_node(node(ident(type, data), line, column, data, opts), acc)}
@@ -329,7 +329,7 @@ defmodule Mix.Tasks.Sql.Gen.Parser do
   def lex(<>, binary, opts, line, column, :comments, data, acc, n) do
     lex(rest, binary, opts, line, column+1, :comments, [data | [b]], acc, n)
   end
-  def lex(<>, binary, opts, line, column, _type, data, acc, n) do
+  def lex(<>, binary, opts, line, column, nil, data, acc, n) do
     lex(rest, binary, opts, line, column+2, :var, data, acc, n)
   end
   def lex(<>, binary, [_, _, _, {:format, true}] = opts, line, column, _type, data, acc, 0 = n), do: lex(rest, binary, opts, line, column+2, nil, [], insert_node(node(:binding, line, column, data, opts), acc), n)
@@ -544,7 +544,7 @@ defmodule Mix.Tasks.Sql.Gen.Parser do
   def type(_b, _type), do: :ident

   def meta(_line, _column, [_,_,_,{_,false}|_]), do: []
-  def meta(line, column, [_, _, {_, {_, _, file}} |_]), do: [line: line, column: column, file: file]
+  def meta(line, column, [_, _, {_, file} |_]), do: [line: line, column: column, file: file]

   def node(:binding = tag, line, column, [idx], [{:binding, false}, {:params, params}|_] = opts) do
     {tag, meta(line, column, opts), Enum.at(params, idx)}
diff --git a/lib/sql.ex b/lib/sql.ex
index e295d6c..02bdeb1 100644
--- a/lib/sql.ex
+++ b/lib/sql.ex
@@ -38,48 +38,7 @@ defmodule SQL do
       {"select id, email from users where email = $0", ["john@example.com"]}
   """
   @doc since: "0.1.0"
-  def to_sql(sql), do: {:persistent_term.get(sql.id), sql.params}
-
-  @doc false
-  def build(right, {:<<>>, meta, _} = left, _modifiers, env) do
-    quote bind_quoted: [right: Macro.unpipe(right), left: left, meta: Macro.escape({meta[:line], meta[:column] || 0, env.file}), e: Macro.escape(env)] do
-      {t, p} = Enum.reduce(right, {[], []}, fn
-        {[], 0}, acc -> acc
-        {v, 0}, {tokens, params} -> {tokens ++ v.tokens, params ++ v.params}
-      end)
-      binding = binding()
-      id = {__MODULE__, :binary.decode_unsigned(left), meta}
-      {tokens, params} = tokens(left, meta, length(p), id)
-      tokens = t ++ tokens
-      params = Enum.reduce(params, p, fn
-        {:var, var}, acc -> acc ++ [binding[String.to_atom(var)]]
-        {:code, code}, acc -> acc ++ [elem(Code.eval_string(code, binding), 0)]
-      end)
-      struct(SQL, params: params, tokens: tokens, id: plan(id, tokens), module: __MODULE__)
-    end
-  end
-
-  def tokens(left, meta, p, id) do
-    if result = :persistent_term.get(id, nil) do
-      result
-    else
-      {:ok, opts, _, _, _, _, tokens} = SQL.Lexer.lex(left, meta, p)
-      result = {tokens, opts[:binding]}
-      :persistent_term.put(id, result)
-      result
-    end
-  end
-
-  def plan(id, tokens) do
-    if uid = :persistent_term.get(tokens, nil) do
-      uid
-    else
-      uid = System.unique_integer([:positive])
-      :persistent_term.put(tokens, uid)
-      :persistent_term.put(uid, to_string(SQL.to_query(SQL.Parser.parse(tokens)), elem(id, 0)))
-      uid
-    end
-  end
+  def to_sql(%{params: params, id: id, module: module}), do: {:persistent_term.get({module, id, :plan}), params}

   @doc """
   Handles the sigil `~SQL` for SQL.
@@ -124,7 +83,16 @@ defmodule SQL do
     token
   end

+  defimpl Inspect, for: SQL do
+    def inspect(sql, _opts), do: Inspect.Algebra.concat(["~SQL\"\"\"\n", :persistent_term.get({sql.id, :inspect}), "\n\"\"\""])
+  end
+
+  defimpl String.Chars, for: SQL do
+    def to_string(%{id: id, module: module}), do: :persistent_term.get({module, id, :plan})
+    def to_string(%{tokens: tokens, module: module}), do: SQL.to_string(tokens, module)
+  end
+
   @doc false
   def to_string(tokens, module) do
     fun = cond do
       Kernel.function_exported?(module, :sql_config, 0) -> &module.sql_config()[:adapter].token_to_string(&1)
@@ -143,19 +111,99 @@ defmodule SQL do
     |> IO.iodata_to_binary()
   end

+  @doc false
+  def build(left, {:<<>>, _, _} = right, _modifiers, env) do
+    data = build(left, right)
+    quote bind_quoted: [module: env.module, left: Macro.unpipe(left), right: right, file: env.file, id: id(data), data: data] do
+      plan_inspect(data, id)
+      {t, p} = Enum.reduce(left, {[], []}, fn
+        {[], 0}, acc -> acc
+        {v, 0}, {t, p} ->
+          {t ++ v.tokens, p ++ v.params}
+      end)
+      {tokens, params} = tokens(right, file, length(p), id)
+      tokens = t ++ tokens
+      plan(tokens, id, module)
+      struct(SQL, params: cast_params(params, p, binding()), tokens: tokens, id: id, module: module)
+    end
+  end
+
+  @doc false
+  def build(left, {:<<>>, _, right}) do
+    left
+    |> Macro.unpipe()
+    |> Enum.reduce({:iodata, right}, fn
+      {[], 0}, acc -> acc
+      {{:sigil_SQL, _meta, [{:<<>>, _, value}, []]}, 0}, {type, acc} -> {type, [value, ?\s, acc]}
+      {{_, _, _} = var, 0}, {_, acc} ->
+        {:dynamic, [var, ?\s, acc]}
+    end)
+    |> case do
+      {:iodata, data} -> IO.iodata_to_binary(data)
+      {:dynamic, data} -> data
+    end
+  end
-  defimpl Inspect, for: SQL do
-    def inspect(sql, _opts) do
-      if Kernel.function_exported?(sql.module, :sql_config, 0) do
-        Enum.reduce(0..length(sql.params), :persistent_term.get(sql.id), &String.replace(&2, sql.module.sql_config()[:adapter].token_to_string({:binding, [], [&1]}), sql.module.sql_config()[:adapter].token_to_string(Enum.at(sql.params, &1)), global: false))
-      else
-        Enum.reduce(0..length(sql.params), :persistent_term.get(sql.id), &String.replace(&2, SQL.String.token_to_sql({:binding, [], [&1]}), SQL.String.token_to_sql(Enum.at(sql.params, &1))))
-      end
+  @doc false
+  def id(data) do
+    if id = :persistent_term.get(data, nil) do
+      id
+    else
+      id = System.unique_integer([:positive])
+      :persistent_term.put(data, id)
+      id
     end
   end

-  defimpl String.Chars, for: SQL do
-    def to_string(%{id: id}), do: :persistent_term.get(id)
-    def to_string(%{tokens: tokens, module: module}), do: SQL.to_string(tokens, module)
+  @doc false
+  def cast_params(bindings, params, binding) do
+    Enum.reduce(bindings, params, fn
+      {:var, var}, acc -> if v = binding[String.to_atom(var)], do: acc ++ [v], else: acc
+      {:code, code}, acc -> acc ++ [elem(Code.eval_string(code, binding), 0)]
+    end)
+  end
+
+  @doc false
+  def tokens(binary, file, count, id) do
+    key = {id, :lex}
+    if result = :persistent_term.get(key, nil) do
+      result
+    else
+      {:ok, opts, _, _, _, _, tokens} = SQL.Lexer.lex(binary, file, count)
+      result = {tokens, opts[:binding]}
+      :persistent_term.put(key, result)
+      result
+    end
+  end
+
+  @doc false
+  def plan(tokens, id, module) do
+    key = {module, id, :plan}
+    if :persistent_term.get(key, nil) do
+      id
+    else
+      :persistent_term.put(key, to_string(SQL.to_query(SQL.Parser.parse(tokens)), module))
+      id
+    end
+  end
+
+  @doc false
+  def plan_inspect(data, id) do
+    key = {id, :inspect}
+    if !:persistent_term.get(key, nil) do
+      data = case data do
+        data when is_list(data) ->
+          data
+          |> Enum.map(fn
+            ast when is_struct(ast) -> :persistent_term.get({ast.id, :inspect}, nil)
+            x -> x
+          end)
+          |> IO.iodata_to_binary()
+
+        data -> data
+      end
+
+      :persistent_term.put(key, data)
+    end
   end
 end
diff --git a/test/sql_test.exs b/test/sql_test.exs
index 262a887..ba321b1 100644
--- a/test/sql_test.exs
+++ b/test/sql_test.exs
@@ -33,7 +33,7 @@ defmodule SQLTest do
   end

   test "inspect/1" do
-    assert "select +1000" == inspect(~SQL[select +1000])
+    assert ~s(~SQL"""\nselect +1000\n""") == inspect(~SQL[select +1000])
   end

   test "to_sql/1" do
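
Reviewer note (not part of the patch): a quick way to exercise the new surface locally is the session below, which mirrors the updated README example and the `test/sql_test.exs` expectation. It assumes the sigil and `to_sql/1` are in scope (for example via `import SQL`); the `users` table and the `email` variable are illustrative only.

```elixir
iex> import SQL
iex> email = "john@example.com"
iex> sql = ~SQL[from users where email = {{email}} select id, email]
iex> inspect(sql)   # Inspect now returns the ~SQL""" ... """ representation
"~SQL\"\"\"\nfrom users where email = {{email}} select id, email\n\"\"\""
iex> to_sql(sql)    # planned query plus bound params, with ? placeholders instead of $0
{"select id, email from users where email = ?", ["john@example.com"]}
iex> to_string(sql) # String.Chars returns only the planned query string
"select id, email from users where email = ?"
```

The `~SQL"""..."""` form follows the Inspect representation convention linked from the CHANGELOG entry, so inspect output can be pasted back into code as a valid sigil.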