diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3fabbf50..732ae3f5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -13,7 +13,7 @@ jobs:
           - elixir: 1.15.6
             otp: 24.3.4.13
           - elixir: 1.15.6
-            otp: 25.3.2.6
+            otp: 26.1.2
             lint: lint
     steps:
       - name: Checkout
diff --git a/lib/ecto/adapters/myxql/connection.ex b/lib/ecto/adapters/myxql/connection.ex
index 55087514..b5cc5bff 100644
--- a/lib/ecto/adapters/myxql/connection.ex
+++ b/lib/ecto/adapters/myxql/connection.ex
@@ -192,7 +192,7 @@ if Code.ensure_loaded?(MyXQL) do
     defp on_conflict({fields, _, []}, _header) when is_list(fields) do
       [
         " ON DUPLICATE KEY UPDATE "
-        | intersperse_map(fields, ?,, fn field ->
+        | Enum.map_intersperse(fields, ?,, fn field ->
            quoted = quote_name(field)
            [quoted, " = VALUES(", quoted, ?)]
          end)
@@ -213,8 +213,8 @@ if Code.ensure_loaded?(MyXQL) do
     defp insert_all(rows) when is_list(rows) do
       [
         "VALUES ",
-        intersperse_map(rows, ?,, fn row ->
-          [?(, intersperse_map(row, ?,, &insert_all_value/1), ?)]
+        Enum.map_intersperse(rows, ?,, fn row ->
+          [?(, Enum.map_intersperse(row, ?,, &insert_all_value/1), ?)]
         end)
       ]
     end
@@ -229,10 +229,10 @@ if Code.ensure_loaded?(MyXQL) do
     @impl true
     def update(prefix, table, fields, filters, _returning) do
-      fields = intersperse_map(fields, ", ", &[quote_name(&1), " = ?"])
+      fields = Enum.map_intersperse(fields, ", ", &[quote_name(&1), " = ?"])

       filters =
-        intersperse_map(filters, " AND ", fn
+        Enum.map_intersperse(filters, " AND ", fn
           {field, nil} ->
             [quote_name(field), " IS NULL"]
@@ -246,7 +246,7 @@ if Code.ensure_loaded?(MyXQL) do
     @impl true
     def delete(prefix, table, filters, _returning) do
       filters =
-        intersperse_map(filters, " AND ", fn
+        Enum.map_intersperse(filters, " AND ", fn
          {field, nil} ->
            [quote_name(field), " IS NULL"]
@@ -330,7 +330,7 @@ if Code.ensure_loaded?(MyXQL) do
       do: "TRUE"

     defp select(fields, sources, query) do
-      intersperse_map(fields, ", ", fn
+      Enum.map_intersperse(fields, ", ", fn
         {:&, _, [idx]} ->
           case elem(sources, idx) do
             {nil, source, nil} ->
@@ -367,7 +367,7 @@ if Code.ensure_loaded?(MyXQL) do
     defp cte(%{with_ctes: %WithExpr{queries: [_ | _]}} = query, sources) do
       %{with_ctes: with} = query
       recursive_opt = if with.recursive, do: "RECURSIVE ", else: ""
-      ctes = intersperse_map(with.queries, ", ", &cte_expr(&1, sources, query))
+      ctes = Enum.map_intersperse(with.queries, ", ", &cte_expr(&1, sources, query))
       ["WITH ", recursive_opt, ctes, " "]
     end
@@ -442,7 +442,7 @@ if Code.ensure_loaded?(MyXQL) do
     defp using_join(%{joins: joins} = query, kind, sources) do
       froms =
-        intersperse_map(joins, ", ", fn
+        Enum.map_intersperse(joins, ", ", fn
           %JoinExpr{source: %Ecto.SubQuery{params: [_ | _]}} ->
             error!(
               query,
@@ -511,8 +511,8 @@ if Code.ensure_loaded?(MyXQL) do
     defp group_by(%{group_bys: group_bys} = query, sources) do
       [
         " GROUP BY "
-        | intersperse_map(group_bys, ", ", fn %QueryExpr{expr: expr} ->
-            intersperse_map(expr, ", ", &expr(&1, sources, query))
+        | Enum.map_intersperse(group_bys, ", ", fn %QueryExpr{expr: expr} ->
+            Enum.map_intersperse(expr, ", ", &expr(&1, sources, query))
          end)
       ]
     end
@@ -522,22 +522,22 @@ if Code.ensure_loaded?(MyXQL) do
     defp window(%{windows: windows} = query, sources) do
       [
         " WINDOW "
-        | intersperse_map(windows, ", ", fn {name, %{expr: kw}} ->
+        | Enum.map_intersperse(windows, ", ", fn {name, %{expr: kw}} ->
            [quote_name(name), " AS " | window_exprs(kw, sources, query)]
          end)
       ]
     end

     defp window_exprs(kw, sources, query) do
-      [?(, intersperse_map(kw, ?\s, &window_expr(&1, sources, query)), ?)]
+      [?(, Enum.map_intersperse(kw, ?\s, &window_expr(&1, sources, query)), ?)]
     end

     defp window_expr({:partition_by, fields}, sources, query) do
-      ["PARTITION BY " | intersperse_map(fields, ", ", &expr(&1, sources, query))]
+      ["PARTITION BY " | Enum.map_intersperse(fields, ", ", &expr(&1, sources, query))]
     end

     defp window_expr({:order_by, fields}, sources, query) do
-      ["ORDER BY " | intersperse_map(fields, ", ", &order_by_expr(&1, sources, query))]
+      ["ORDER BY " | Enum.map_intersperse(fields, ", ", &order_by_expr(&1, sources, query))]
     end

     defp window_expr({:frame, {:fragment, _, _} = fragment}, sources, query) do
@@ -549,8 +549,8 @@ if Code.ensure_loaded?(MyXQL) do
     defp order_by(%{order_bys: order_bys} = query, sources) do
       [
         " ORDER BY "
-        | intersperse_map(order_bys, ", ", fn %QueryExpr{expr: expr} ->
-            intersperse_map(expr, ", ", &order_by_expr(&1, sources, query))
+        | Enum.map_intersperse(order_bys, ", ", fn %QueryExpr{expr: expr} ->
+            Enum.map_intersperse(expr, ", ", &order_by_expr(&1, sources, query))
          end)
       ]
     end
@@ -654,7 +654,7 @@ if Code.ensure_loaded?(MyXQL) do
     end

     defp expr({:in, _, [left, right]}, sources, query) when is_list(right) do
-      args = intersperse_map(right, ?,, &expr(&1, sources, query))
+      args = Enum.map_intersperse(right, ?,, &expr(&1, sources, query))
       [expr(left, sources, query), " IN (", args, ?)]
     end
@@ -759,7 +759,7 @@ if Code.ensure_loaded?(MyXQL) do
     end

     defp expr({:{}, _, elems}, sources, query) do
-      [?(, intersperse_map(elems, ?,, &expr(&1, sources, query)), ?)]
+      [?(, Enum.map_intersperse(elems, ?,, &expr(&1, sources, query)), ?)]
     end

     defp expr({:count, _, []}, _sources, _query), do: "count(*)"
@@ -790,7 +790,7 @@ if Code.ensure_loaded?(MyXQL) do
           [op_to_binary(left, sources, query), op | op_to_binary(right, sources, query)]

         {:fun, fun} ->
-          [fun, ?(, modifier, intersperse_map(args, ", ", &expr(&1, sources, query)), ?)]
+          [fun, ?(, modifier, Enum.map_intersperse(args, ", ", &expr(&1, sources, query)), ?)]
       end
     end
@@ -847,14 +847,14 @@ if Code.ensure_loaded?(MyXQL) do
       [
         "VALUES ",
-        intersperse_map(rows, ?,, fn _ ->
+        Enum.map_intersperse(rows, ?,, fn _ ->
           ["ROW(", values_expr(types, query), ?)]
         end)
       ]
     end

     defp values_expr(types, query) do
-      intersperse_map(types, ?,, fn {_field, type} ->
+      Enum.map_intersperse(types, ?,, fn {_field, type} ->
         ["CAST(", ??, " AS ", ecto_cast_to_db(type, query), ?)]
       end)
     end
@@ -989,7 +989,7 @@ if Code.ensure_loaded?(MyXQL) do
         quote_table(index.prefix, index.table),
         ?\s,
         ?(,
-        intersperse_map(index.columns, ", ", &index_expr/1),
+        Enum.map_intersperse(index.columns, ", ", &index_expr/1),
         ?),
         if_do(index.using, [" USING ", to_string(index.using)]),
         if_do(index.concurrently, " LOCK=NONE")
@@ -1097,7 +1097,7 @@ if Code.ensure_loaded?(MyXQL) do
     end

     defp column_definitions(table, columns) do
-      intersperse_map(columns, ", ", &column_definition(table, &1))
+      Enum.map_intersperse(columns, ", ", &column_definition(table, &1))
     end

     defp column_definition(table, {:add, name, %Reference{} = ref, opts}) do
@@ -1115,7 +1115,7 @@ if Code.ensure_loaded?(MyXQL) do
     end

     defp column_changes(table, columns) do
-      intersperse_map(columns, ", ", &column_change(table, &1))
+      Enum.map_intersperse(columns, ", ", &column_change(table, &1))
     end

     defp column_change(_table, {_command, _name, %Reference{validate: false}, _opts}) do
@@ -1390,7 +1390,7 @@ if Code.ensure_loaded?(MyXQL) do
       [?`, name, ?`]
     end

-    defp quote_names(names), do: intersperse_map(names, ?,, &quote_name/1)
+    defp quote_names(names), do: Enum.map_intersperse(names, ?,, &quote_name/1)

     defp quote_table(nil, name), do: quote_table(name)
     defp quote_table(prefix, name), do: [quote_table(prefix), ?., quote_table(name)]
@@ -1409,17 +1409,6 @@ if Code.ensure_loaded?(MyXQL) do
     defp format_to_sql(:map), do: "FORMAT=JSON"
     defp format_to_sql(:text), do: "FORMAT=TRADITIONAL"

-    defp intersperse_map(list, separator, mapper, acc \\ [])
-
-    defp intersperse_map([], _separator, _mapper, acc),
-      do: acc
-
-    defp intersperse_map([elem], _separator, mapper, acc),
-      do: [acc | mapper.(elem)]
-
-    defp intersperse_map([elem | rest], separator, mapper, acc),
-      do: intersperse_map(rest, separator, mapper, [acc, mapper.(elem), separator])
-
     defp if_do(condition, value) do
       if condition, do: value, else: []
     end
diff --git a/lib/ecto/adapters/postgres/connection.ex b/lib/ecto/adapters/postgres/connection.ex
index aa140a7c..980edd5a 100644
--- a/lib/ecto/adapters/postgres/connection.ex
+++ b/lib/ecto/adapters/postgres/connection.ex
@@ -223,7 +223,7 @@ if Code.ensure_loaded?(Postgrex) do
       values =
         if header == [] do
-          [" VALUES " | intersperse_map(rows, ?,, fn _ -> "(DEFAULT)" end)]
+          [" VALUES " | Enum.map_intersperse(rows, ?,, fn _ -> "(DEFAULT)" end)]
         else
           [" (", quote_names(header), ") " | insert_all(rows, counter_offset)]
         end
@@ -276,7 +276,7 @@ if Code.ensure_loaded?(Postgrex) do
     defp replace(fields) do
       [
         "UPDATE SET "
-        | intersperse_map(fields, ?,, fn field ->
+        | Enum.map_intersperse(fields, ?,, fn field ->
            quoted = quote_name(field)
            [quoted, " = ", "EXCLUDED." | quoted]
          end)
@@ -450,7 +450,7 @@ if Code.ensure_loaded?(Postgrex) do
       do: "TRUE"

     defp select_fields(fields, sources, query) do
-      intersperse_map(fields, ", ", fn
+      Enum.map_intersperse(fields, ", ", fn
         {:&, _, [idx]} ->
           case elem(sources, idx) do
             {nil, source, nil} ->
@@ -487,7 +487,7 @@ if Code.ensure_loaded?(Postgrex) do
     defp distinct(%QueryExpr{expr: exprs}, sources, query) do
       {[
          " DISTINCT ON (",
-         intersperse_map(exprs, ", ", fn {_, expr} -> expr(expr, sources, query) end),
+         Enum.map_intersperse(exprs, ", ", fn {_, expr} -> expr(expr, sources, query) end),
          ?)
        ], exprs}
     end
@@ -500,7 +500,7 @@ if Code.ensure_loaded?(Postgrex) do
     defp cte(%{with_ctes: %WithExpr{queries: [_ | _]}} = query, sources) do
       %{with_ctes: with} = query
       recursive_opt = if with.recursive, do: "RECURSIVE ", else: ""
-      ctes = intersperse_map(with.queries, ", ", &cte_expr(&1, sources, query))
+      ctes = Enum.map_intersperse(with.queries, ", ", &cte_expr(&1, sources, query))
       ["WITH ", recursive_opt, ctes, " "]
     end
@@ -609,7 +609,7 @@ if Code.ensure_loaded?(Postgrex) do
       end

       froms =
-        intersperse_map(inner_joins, ", ", fn
+        Enum.map_intersperse(inner_joins, ", ", fn
          %JoinExpr{qual: :inner, ix: ix, source: source} ->
            {join, name} = get_source(query, sources, ix, source)
            [join, " AS " | [name]]
@@ -627,7 +627,7 @@ if Code.ensure_loaded?(Postgrex) do
     defp using_join(%{joins: joins} = query, kind, prefix, sources) do
       froms =
-        intersperse_map(joins, ", ", fn
+        Enum.map_intersperse(joins, ", ", fn
          %JoinExpr{qual: :inner, ix: ix, source: source} ->
            {join, name} = get_source(query, sources, ix, source)
            [join, " AS " | name]
@@ -649,7 +649,7 @@ if Code.ensure_loaded?(Postgrex) do
     defp join(%{joins: joins} = query, sources) do
       [
         ?\s
-        | intersperse_map(joins, ?\s, fn
+        | Enum.map_intersperse(joins, ?\s, fn
           %JoinExpr{
             on: %QueryExpr{expr: expr},
             qual: qual,
@@ -700,9 +700,9 @@ if Code.ensure_loaded?(Postgrex) do
     defp group_by(%{group_bys: group_bys} = query, sources) do
       [
         " GROUP BY "
-        | intersperse_map(group_bys, ", ", fn
+        | Enum.map_intersperse(group_bys, ", ", fn
            %QueryExpr{expr: expr} ->
-             intersperse_map(expr, ", ", &expr(&1, sources, query))
+             Enum.map_intersperse(expr, ", ", &expr(&1, sources, query))
          end)
       ]
     end
@@ -712,22 +712,22 @@ if Code.ensure_loaded?(Postgrex) do
     defp window(%{windows: windows} = query, sources) do
       [
         " WINDOW "
-        | intersperse_map(windows, ", ", fn {name, %{expr: kw}} ->
+        | Enum.map_intersperse(windows, ", ", fn {name, %{expr: kw}} ->
            [quote_name(name), " AS " | window_exprs(kw, sources, query)]
          end)
       ]
     end

     defp window_exprs(kw, sources, query) do
-      [?(, intersperse_map(kw, ?\s, &window_expr(&1, sources, query)), ?)]
+      [?(, Enum.map_intersperse(kw, ?\s, &window_expr(&1, sources, query)), ?)]
     end

     defp window_expr({:partition_by, fields}, sources, query) do
-      ["PARTITION BY " | intersperse_map(fields, ", ", &expr(&1, sources, query))]
+      ["PARTITION BY " | Enum.map_intersperse(fields, ", ", &expr(&1, sources, query))]
     end

     defp window_expr({:order_by, fields}, sources, query) do
-      ["ORDER BY " | intersperse_map(fields, ", ", &order_by_expr(&1, sources, query))]
+      ["ORDER BY " | Enum.map_intersperse(fields, ", ", &order_by_expr(&1, sources, query))]
     end

     defp window_expr({:frame, {:fragment, _, _} = fragment}, sources, query) do
@@ -739,7 +739,7 @@ if Code.ensure_loaded?(Postgrex) do
     defp order_by(%{order_bys: order_bys} = query, distinct, sources) do
       order_bys = Enum.flat_map(order_bys, & &1.expr)
       order_bys = order_by_concat(distinct, order_bys)
-      [" ORDER BY " | intersperse_map(order_bys, ", ", &order_by_expr(&1, sources, query))]
+      [" ORDER BY " | Enum.map_intersperse(order_bys, ", ", &order_by_expr(&1, sources, query))]
     end

     defp order_by_concat([head | left], [head | right]), do: [head | order_by_concat(left, right)]
@@ -852,7 +852,7 @@ if Code.ensure_loaded?(Postgrex) do
     end

     defp expr({:in, _, [left, right]}, sources, query) when is_list(right) do
-      args = intersperse_map(right, ?,, &expr(&1, sources, query))
+      args = Enum.map_intersperse(right, ?,, &expr(&1, sources, query))
       [expr(left, sources, query), " IN (", args, ?)]
     end
@@ -954,7 +954,7 @@ if Code.ensure_loaded?(Postgrex) do
     end

     defp expr({:{}, _, elems}, sources, query) do
-      [?(, intersperse_map(elems, ?,, &expr(&1, sources, query)), ?)]
+      [?(, Enum.map_intersperse(elems, ?,, &expr(&1, sources, query)), ?)]
     end

     defp expr({:count, _, []}, _sources, _query), do: "count(*)"
@@ -984,7 +984,7 @@ if Code.ensure_loaded?(Postgrex) do
           [maybe_paren(left, sources, query), op | maybe_paren(right, sources, query)]

         {:fun, fun} ->
-          [fun, ?(, modifier, intersperse_map(args, ", ", &expr(&1, sources, query)), ?)]
+          [fun, ?(, modifier, Enum.map_intersperse(args, ", ", &expr(&1, sources, query)), ?)]
       end
     end
@@ -1003,7 +1003,7 @@ if Code.ensure_loaded?(Postgrex) do
     end

     defp expr(list, sources, query) when is_list(list) do
-      ["ARRAY[", intersperse_map(list, ?,, &expr(&1, sources, query)), ?]]
+      ["ARRAY[", Enum.map_intersperse(list, ?,, &expr(&1, sources, query)), ?]]
     end

     defp expr(%Decimal{} = decimal, _sources, _query) do
@@ -1044,7 +1044,7 @@ if Code.ensure_loaded?(Postgrex) do
     end

     defp json_extract_path(expr, path, sources, query) do
-      path = intersperse_map(path, ?,, &escape_json/1)
+      path = Enum.map_intersperse(path, ?,, &escape_json/1)
       [?(, expr(expr, sources, query), "#>'{", path, "}')"]
     end
@@ -1207,8 +1207,8 @@ if Code.ensure_loaded?(Postgrex) do
     end

     def execute_ddl({command, %Index{} = index}) when command in @creates do
-      fields = intersperse_map(index.columns, ", ", &index_expr/1)
-      include_fields = intersperse_map(index.include, ", ", &index_expr/1)
+      fields = Enum.map_intersperse(index.columns, ", ", &index_expr/1)
+      include_fields = Enum.map_intersperse(index.include, ", ", &index_expr/1)

       maybe_nulls_distinct =
         case index.nulls_distinct do
@@ -1388,7 +1388,7 @@ if Code.ensure_loaded?(Postgrex) do
     end

     defp column_definitions(table, columns) do
-      intersperse_map(columns, ", ", &column_definition(table, &1))
+      Enum.map_intersperse(columns, ", ", &column_definition(table, &1))
     end

     defp column_definition(table, {:add, name, %Reference{} = ref, opts}) do
@@ -1407,7 +1407,7 @@ if Code.ensure_loaded?(Postgrex) do
     end

     defp column_changes(table, columns) do
-      intersperse_map(columns, ", ", &column_change(table, &1))
+      Enum.map_intersperse(columns, ", ", &column_change(table, &1))
     end

     defp column_change(table, {:add, name, %Reference{} = ref, opts}) do
@@ -1758,7 +1758,7 @@ if Code.ensure_loaded?(Postgrex) do
     end

     defp quote_names(names) do
-      intersperse_map(names, ?,, &quote_name/1)
+      Enum.map_intersperse(names, ?,, &quote_name/1)
     end

     defp quote_name(name) when is_atom(name) do
@@ -1799,17 +1799,6 @@ if Code.ensure_loaded?(Postgrex) do
     defp single_quote(value), do: [?', escape_string(value), ?']

-    defp intersperse_map(list, separator, mapper, acc \\ [])
-
-    defp intersperse_map([], _separator, _mapper, acc),
-      do: acc
-
-    defp intersperse_map([elem], _separator, mapper, acc),
-      do: [acc | mapper.(elem)]
-
-    defp intersperse_map([elem | rest], separator, mapper, acc),
-      do: intersperse_map(rest, separator, mapper, [acc, mapper.(elem), separator])
-
     defp intersperse_reduce(list, separator, user_acc, reducer, acc \\ [])

     defp intersperse_reduce([], _separator, user_acc, _reducer, acc),
diff --git a/lib/ecto/adapters/tds/connection.ex b/lib/ecto/adapters/tds/connection.ex
index 842acaac..8ef788d0 100644
--- a/lib/ecto/adapters/tds/connection.ex
+++ b/lib/ecto/adapters/tds/connection.ex
@@ -393,7 +393,7 @@ if Code.ensure_loaded?(Tds) do
     end

     defp select(fields, sources, query) do
-      intersperse_map(fields, ", ", fn
+      Enum.map_intersperse(fields, ", ", fn
         {:&, _, [idx]} ->
           case elem(sources, idx) do
             {nil, source, nil} ->
@@ -435,7 +435,7 @@ if Code.ensure_loaded?(Tds) do
     end

     defp cte(%{with_ctes: %WithExpr{queries: [_ | _] = queries}} = query, sources) do
-      ctes = intersperse_map(queries, ", ", &cte_expr(&1, sources, query))
+      ctes = Enum.map_intersperse(queries, ", ", &cte_expr(&1, sources, query))
       ["WITH ", ctes, " "]
     end
@@ -467,7 +467,7 @@ if Code.ensure_loaded?(Tds) do
     defp cte_header(%Ecto.Query{select: %{fields: fields}} = query, _) do
       [
         " (",
-        intersperse_map(fields, ",", fn
+        Enum.map_intersperse(fields, ",", fn
          {key, _} ->
            quote_name(key)
@@ -526,7 +526,7 @@ if Code.ensure_loaded?(Tds) do
     defp join(%{joins: joins} = query, sources) do
       [
         ?\s,
-        intersperse_map(joins, ?\s, fn
+        Enum.map_intersperse(joins, ?\s, fn
          %JoinExpr{on: %QueryExpr{expr: expr}, qual: qual, ix: ix, source: source, hints: hints} ->
            {join, name} = get_source(query, sources, ix, source)
            qual_text = join_qual(qual, query)
@@ -571,8 +571,8 @@ if Code.ensure_loaded?(Tds) do
     defp group_by(%{group_bys: group_bys} = query, sources) do
       [
         " GROUP BY "
-        | intersperse_map(group_bys, ", ", fn %QueryExpr{expr: expr} ->
-            intersperse_map(expr, ", ", &expr(&1, sources, query))
+        | Enum.map_intersperse(group_bys, ", ", fn %QueryExpr{expr: expr} ->
+            Enum.map_intersperse(expr, ", ", &expr(&1, sources, query))
          end)
       ]
     end
@@ -582,8 +582,8 @@ if Code.ensure_loaded?(Tds) do
     defp order_by(%{order_bys: order_bys} = query, sources) do
       [
         " ORDER BY "
-        | intersperse_map(order_bys, ", ", fn %QueryExpr{expr: expr} ->
-            intersperse_map(expr, ", ", &order_by_expr(&1, sources, query))
+        | Enum.map_intersperse(order_bys, ", ", fn %QueryExpr{expr: expr} ->
+            Enum.map_intersperse(expr, ", ", &order_by_expr(&1, sources, query))
          end)
       ]
     end
@@ -860,7 +860,7 @@ if Code.ensure_loaded?(Tds) do
           [op_to_binary(left, sources, query), op | op_to_binary(right, sources, query)]

         {:fun, fun} ->
-          [fun, ?(, modifier, intersperse_map(args, ", ", &expr(&1, sources, query)), ?)]
+          [fun, ?(, modifier, Enum.map_intersperse(args, ", ", &expr(&1, sources, query)), ?)]
       end
     end
@@ -995,7 +995,7 @@ if Code.ensure_loaded?(Tds) do
     defp returning([], _verb), do: []

     defp returning(returning, verb) when is_list(returning) do
-      [" OUTPUT ", intersperse_map(returning, ", ", &[verb, ?., quote_name(&1)])]
+      [" OUTPUT ", Enum.map_intersperse(returning, ", ", &[verb, ?., quote_name(&1)])]
     end

     defp returning(%{select: nil}, _, _),
@@ -1004,7 +1004,7 @@ if Code.ensure_loaded?(Tds) do
     defp returning(%{select: %{fields: fields}} = query, idx, verb),
       do: [
         " OUTPUT "
-        | intersperse_map(fields, ", ", fn
+        | Enum.map_intersperse(fields, ", ", fn
            {{:., _, [{:&, _, [^idx]}, key]}, _, _} -> [verb, ?., quote_name(key)]
            _ -> error!(query, "MSSQL can only return table #{verb} columns")
          end)
@@ -1157,7 +1157,7 @@ if Code.ensure_loaded?(Tds) do
       include =
         index.include
         |> List.wrap()
-        |> intersperse_map(", ", &index_expr/1)
+        |> Enum.map_intersperse(", ", &index_expr/1)

       [
         [
@@ -1173,7 +1173,7 @@ if Code.ensure_loaded?(Tds) do
           " ON ",
           quote_table(prefix, index.table),
           " (",
-          intersperse_map(index.columns, ", ", &index_expr/1),
+          Enum.map_intersperse(index.columns, ", ", &index_expr/1),
           ?),
           if_do(include != [], [" INCLUDE ", ?(, include, ?)]),
           if_do(index.where, [" WHERE (", index.where, ?)]),
@@ -1321,7 +1321,7 @@ if Code.ensure_loaded?(Tds) do
     end

     defp column_definitions(table, columns) do
-      intersperse_map(columns, ", ", &column_definition(table, &1))
+      Enum.map_intersperse(columns, ", ", &column_definition(table, &1))
     end

     defp column_definition(table, {:add, name, %Reference{} = ref, opts}) do
@@ -1628,7 +1628,7 @@ if Code.ensure_loaded?(Tds) do
       "[#{name}]"
     end

-    defp quote_names(names), do: intersperse_map(names, ?,, &quote_name/1)
+    defp quote_names(names), do: Enum.map_intersperse(names, ?,, &quote_name/1)

     defp quote_table(nil, name), do: quote_table(name)
@@ -1681,13 +1681,6 @@ if Code.ensure_loaded?(Tds) do
       name
     end

-    defp intersperse_map([], _separator, _mapper), do: []
-    defp intersperse_map([elem], _separator, mapper), do: mapper.(elem)
-
-    defp intersperse_map([elem | rest], separator, mapper) do
-      [mapper.(elem), separator | intersperse_map(rest, separator, mapper)]
-    end
-
     defp intersperse_reduce(list, separator, user_acc, reducer, acc \\ [])

     defp intersperse_reduce([], _separator, user_acc, _reducer, acc),
diff --git a/test/ecto/adapters/myxql_test.exs b/test/ecto/adapters/myxql_test.exs
index a9f9f10b..37996bb2 100644
--- a/test/ecto/adapters/myxql_test.exs
+++ b/test/ecto/adapters/myxql_test.exs
@@ -1485,10 +1485,16 @@ defmodule Ecto.Adapters.MyXQLTest do
       |> plan()
       |> all()

+    cast_types = %{bid: "binary(16)", num: "unsigned"}
+    from_values_text = values_text(values, cast_types)
+    join_values_text = values_text(values, cast_types)
+    select_fields = Enum.map_join(types, ", ", fn {field, _} -> "v1.`#{field}`" end)
+    field_names = Enum.map_join(types, ",", fn {field, _} -> "`#{field}`" end)
+
     assert query ==
-             ~s{SELECT v1.`bid`, v1.`num` } <>
-               ~s{FROM (VALUES ROW(CAST(? AS binary(16)),CAST(? AS unsigned)),ROW(CAST(? AS binary(16)),CAST(? AS unsigned))) AS v0 (`bid`,`num`) } <>
-               ~s{INNER JOIN (VALUES ROW(CAST(? AS binary(16)),CAST(? AS unsigned)),ROW(CAST(? AS binary(16)),CAST(? AS unsigned))) AS v1 (`bid`,`num`) ON v0.`bid` = v1.`bid` } <>
+             ~s{SELECT #{select_fields} } <>
+               ~s{FROM (#{from_values_text}) AS v0 (#{field_names}) } <>
+               ~s{INNER JOIN (#{join_values_text}) AS v1 (#{field_names}) ON v0.`bid` = v1.`bid` } <>
               ~s{WHERE (v0.`num` = ?)}
   end
@@ -1502,9 +1508,13 @@ defmodule Ecto.Adapters.MyXQLTest do
       |> plan(:delete_all)
       |> delete_all()

+    cast_types = %{bid: "binary(16)", num: "unsigned"}
+    values_text = values_text(values, cast_types)
+    fields = Enum.map_join(types, ",", fn {field, _} -> "`#{field}`" end)
+
     assert query ==
             ~s{DELETE s0.* FROM `schema` AS s0 } <>
-             ~s{INNER JOIN (VALUES ROW(CAST(? AS binary(16)),CAST(? AS unsigned)),ROW(CAST(? AS binary(16)),CAST(? AS unsigned))) AS v1 (`bid`,`num`) } <>
+             ~s{INNER JOIN (#{values_text}) AS v1 (#{fields}) } <>
               ~s{ON s0.`x` = v1.`num` WHERE (v1.`num` = ?)}
   end
@@ -1523,12 +1533,34 @@ defmodule Ecto.Adapters.MyXQLTest do
       |> plan(:update_all)
       |> update_all()

+    cast_types = %{bid: "binary(16)", num: "unsigned"}
+    values_text = values_text(values, cast_types)
+    fields = Enum.map_join(types, ",", fn {field, _} -> "`#{field}`" end)
+
     assert query ==
             ~s{UPDATE `schema` AS s0, } <>
-             ~s{(VALUES ROW(CAST(? AS binary(16)),CAST(? AS unsigned)),ROW(CAST(? AS binary(16)),CAST(? AS unsigned))) AS v1 (`bid`,`num`) } <>
+             ~s{(#{values_text}) AS v1 (#{fields}) } <>
               ~s{SET s0.`y` = v1.`num` WHERE (s0.`x` = v1.`num`) AND (v1.`num` = ?)}
   end

+  defp values_text(values, types) do
+    types = Map.to_list(types)
+
+    [
+      "VALUES ",
+      Enum.map_intersperse(values, ?,, fn _ ->
+        ["ROW(", values_expr(types), ?)]
+      end)
+    ]
+    |> IO.iodata_to_binary()
+  end
+
+  defp values_expr(types) do
+    Enum.map_intersperse(types, ?,, fn {field, _} ->
+      ["CAST(", ??, " AS ", types[field], ?)]
+    end)
+  end
+
   # DDL

   import Ecto.Migration, only: [table: 1, table: 2, index: 2, index: 3, constraint: 3]
@@ -1708,15 +1740,18 @@ defmodule Ecto.Adapters.MyXQLTest do
   end

   test "create table with a map column, and a map default with values" do
+    default = %{foo: "bar", baz: "boom"}
+    default_text = "'{" <> Enum.map_join(default, ",", fn {k, v} -> ~s{"#{k}":"#{v}"} end) <> "}'"
+
     create =
       {:create, table(:posts),
        [
-         {:add, :a, :map, [default: %{foo: "bar", baz: "boom"}]}
+         {:add, :a, :map, [default: default]}
        ]}

     assert execute_ddl(create) == [
              """
-             CREATE TABLE `posts` (`a` json DEFAULT ('{\"baz\":\"boom\",\"foo\":\"bar\"}')) ENGINE = INNODB
+             CREATE TABLE `posts` (`a` json DEFAULT (#{default_text})) ENGINE = INNODB
              """
              |> remove_newlines
            ]
diff --git a/test/ecto/adapters/postgres_test.exs b/test/ecto/adapters/postgres_test.exs
index 1f5ff2bd..0e15be94 100644
--- a/test/ecto/adapters/postgres_test.exs
+++ b/test/ecto/adapters/postgres_test.exs
@@ -1,7 +1,10 @@
+Code.require_file("../../support/connection_helpers.exs", __DIR__)
+
 defmodule Ecto.Adapters.PostgresTest do
   use ExUnit.Case, async: true

   import Ecto.Query
+  import Support.ConnectionHelpers

   alias Ecto.Queryable
   alias Ecto.Adapters.Postgres.Connection, as: SQL
@@ -1889,10 +1892,16 @@ defmodule Ecto.Adapters.PostgresTest do
       |> plan()
       |> all()

+    cast_types = %{bid: "uuid", num: "bigint"}
+    from_values_text = values_text(values, cast_types, 1)
+    join_values_text = values_text(values, cast_types, 5)
+    select_fields = Enum.map_join(types, ", ", fn {field, _} -> ~s{v1."#{field}"} end)
+    field_names = Enum.map_join(types, ",", fn {field, _} -> ~s{"#{field}"} end)
+
     assert query ==
-             ~s{SELECT v1."bid", v1."num" } <>
-               ~s{FROM (VALUES ($1::uuid,$2::bigint),($3::uuid,$4::bigint)) AS v0 ("bid","num") } <>
-               ~s{INNER JOIN (VALUES ($5::uuid,$6::bigint),($7::uuid,$8::bigint)) AS v1 ("bid","num") ON v0."bid" = v1."bid" } <>
+             ~s{SELECT #{select_fields} } <>
+               ~s{FROM (#{from_values_text}) AS v0 (#{field_names}) } <>
+               ~s{INNER JOIN (#{join_values_text}) AS v1 (#{field_names}) ON v0."bid" = v1."bid" } <>
               ~s{WHERE (v0."num" = $9)}
   end
@@ -1906,9 +1915,13 @@ defmodule Ecto.Adapters.PostgresTest do
       |> plan(:delete_all)
       |> delete_all()

+    cast_types = %{bid: "uuid", num: "bigint"}
+    values_text = values_text(values, cast_types, 1)
+    fields = Enum.map_join(types, ",", fn {field, _} -> ~s{"#{field}"} end)
+
     assert query ==
             ~s{DELETE FROM "schema" AS s0 } <>
-             ~s{USING (VALUES ($1::uuid,$2::bigint),($3::uuid,$4::bigint)) AS v1 ("bid","num") } <>
+             ~s{USING (#{values_text}) AS v1 (#{fields}) } <>
               ~s{WHERE (s0."x" = v1."num") AND (v1."num" = $5)}
   end
@@ -1927,12 +1940,36 @@ defmodule Ecto.Adapters.PostgresTest do
       |> plan(:update_all)
       |> update_all()

+    cast_types = %{bid: "uuid", num: "bigint"}
+    values_text = values_text(values, cast_types, 1)
+    fields = Enum.map_join(types, ",", fn {field, _} -> ~s{"#{field}"} end)
+
     assert query ==
             ~s{UPDATE "schema" AS s0 SET "y" = v1."num" } <>
-             ~s{FROM (VALUES ($1::uuid,$2::bigint),($3::uuid,$4::bigint)) AS v1 ("bid","num") } <>
+             ~s{FROM (#{values_text}) AS v1 (#{fields}) } <>
               ~s{WHERE (s0."x" = v1."num") AND (v1."num" = $5)}
   end

+  defp values_text(values, types, ix) do
+    types = Map.to_list(types)
+
+    [
+      "VALUES ",
+      intersperse_reduce(values, ?,, ix, fn _, ix ->
+        {value, ix} = values_expr(types, ix)
+        {[?(, value, ?)], ix}
+      end)
+      |> elem(0)
+    ]
+    |> IO.iodata_to_binary()
+  end
+
+  defp values_expr(types, ix) do
+    intersperse_reduce(types, ?,, ix, fn {field, _}, ix ->
+      {[?$, Integer.to_string(ix), ?:, ?: | types[field]], ix + 1}
+    end)
+  end
+
   # DDL

   alias Ecto.Migration.Reference
@@ -2231,14 +2268,17 @@ defmodule Ecto.Adapters.PostgresTest do
   end

   test "create table with a map column, and a map default with values" do
+    default = %{foo: "bar", baz: "boom"}
+    default_text = "'{" <> Enum.map_join(default, ",", fn {k, v} -> ~s{"#{k}":"#{v}"} end) <> "}'"
+
     create =
       {:create, table(:posts),
        [
-         {:add, :a, :map, [default: %{foo: "bar", baz: "boom"}]}
+         {:add, :a, :map, [default: default]}
        ]}

     assert execute_ddl(create) == [
-             ~s|CREATE TABLE "posts" ("a" jsonb DEFAULT '{"baz":"boom","foo":"bar"}')|
+             ~s|CREATE TABLE "posts" ("a" jsonb DEFAULT #{default_text})|
           ]
   end
diff --git a/test/ecto/adapters/tds_test.exs b/test/ecto/adapters/tds_test.exs
index 6fd3a7d2..b9e344c6 100644
--- a/test/ecto/adapters/tds_test.exs
+++ b/test/ecto/adapters/tds_test.exs
@@ -1,7 +1,10 @@
+Code.require_file("../../support/connection_helpers.exs", __DIR__)
+
 defmodule Ecto.Adapters.TdsTest do
   use ExUnit.Case, async: true

   import Ecto.Query
+  import Support.ConnectionHelpers

   alias Ecto.Queryable
   alias Ecto.Adapters.Tds.Connection, as: SQL
@@ -1309,10 +1312,16 @@ defmodule Ecto.Adapters.TdsTest do
       |> plan()
       |> all()

+    cast_types = %{bid: "uniqueidentifier", num: "integer"}
+    from_values_text = values_text(values, cast_types, 1)
+    join_values_text = values_text(values, cast_types, 5)
+    select_fields = Enum.map_join(types, ", ", fn {field, _} -> "v1.[#{field}]" end)
+    field_names = Enum.map_join(types, ",", fn {field, _} -> "[#{field}]" end)
+
     assert query ==
-             ~s{SELECT v1.[bid], v1.[num] } <>
-               ~s{FROM (VALUES (CAST(@1 AS uniqueidentifier),CAST(@2 AS integer)),(CAST(@3 AS uniqueidentifier),CAST(@4 AS integer))) AS v0 ([bid],[num]) } <>
-               ~s{INNER JOIN (VALUES (CAST(@5 AS uniqueidentifier),CAST(@6 AS integer)),(CAST(@7 AS uniqueidentifier),CAST(@8 AS integer))) AS v1 ([bid],[num]) ON v0.[bid] = v1.[bid] } <>
+             ~s{SELECT #{select_fields} } <>
+               ~s{FROM (#{from_values_text}) AS v0 (#{field_names}) } <>
+               ~s{INNER JOIN (#{join_values_text}) AS v1 (#{field_names}) ON v0.[bid] = v1.[bid] } <>
               ~s{WHERE (v0.[num] = @9)}
   end
@@ -1326,9 +1335,13 @@ defmodule Ecto.Adapters.TdsTest do
       |> plan(:delete_all)
       |> delete_all()

+    cast_types = %{bid: "uniqueidentifier", num: "integer"}
+    values_text = values_text(values, cast_types, 1)
+    fields = Enum.map_join(types, ",", fn {field, _} -> "[#{field}]" end)
+
     assert query ==
             ~s{DELETE s0 FROM [schema] AS s0 } <>
-             ~s{INNER JOIN (VALUES (CAST(@1 AS uniqueidentifier),CAST(@2 AS integer)),(CAST(@3 AS uniqueidentifier),CAST(@4 AS integer))) AS v1 ([bid],[num]) } <>
+             ~s{INNER JOIN (#{values_text}) AS v1 (#{fields}) } <>
               ~s{ON s0.[x] = v1.[num] WHERE (v1.[num] = @5)}
   end
@@ -1347,12 +1360,36 @@ defmodule Ecto.Adapters.TdsTest do
       |> plan(:update_all)
       |> update_all()

+    cast_types = %{bid: "uniqueidentifier", num: "integer"}
+    values_text = values_text(values, cast_types, 1)
+    fields = Enum.map_join(types, ",", fn {field, _} -> "[#{field}]" end)
+
     assert query ==
             ~s{UPDATE s0 SET s0.[y] = v1.[num] FROM [schema] AS s0 } <>
-             ~s{INNER JOIN (VALUES (CAST(@1 AS uniqueidentifier),CAST(@2 AS integer)),(CAST(@3 AS uniqueidentifier),CAST(@4 AS integer))) AS v1 ([bid],[num]) } <>
+             ~s{INNER JOIN (#{values_text}) AS v1 (#{fields}) } <>
               ~s{ON s0.[x] = v1.[num] WHERE (v1.[num] = @5)}
   end

+  defp values_text(values, types, ix) do
+    types = Map.to_list(types)
+
+    [
+      "VALUES ",
+      intersperse_reduce(values, ?,, ix, fn _, ix ->
+        {value, ix} = values_expr(types, ix)
+        {[?(, value, ?)], ix}
+      end)
+      |> elem(0)
+    ]
+    |> IO.iodata_to_binary()
+  end
+
+  defp values_expr(types, ix) do
+    intersperse_reduce(types, ?,, ix, fn {field, _}, ix ->
+      {["CAST(", ?@, Integer.to_string(ix), " AS ", types[field], ?)], ix + 1}
+    end)
+  end
+
   ## DDL

   import Ecto.Migration,
diff --git a/test/support/connection_helpers.exs b/test/support/connection_helpers.exs
new file mode 100644
index 00000000..9358d624
--- /dev/null
+++ b/test/support/connection_helpers.exs
@@ -0,0 +1,19 @@
+defmodule Support.ConnectionHelpers do
+  @doc """
+  Reduces and intersperses a list in one pass.
+  """
+  def intersperse_reduce(list, separator, user_acc, reducer, acc \\ [])
+
+  def intersperse_reduce([], _separator, user_acc, _reducer, acc),
+    do: {acc, user_acc}
+
+  def intersperse_reduce([elem], _separator, user_acc, reducer, acc) do
+    {elem, user_acc} = reducer.(elem, user_acc)
+    {[acc | elem], user_acc}
+  end
+
+  def intersperse_reduce([elem | rest], separator, user_acc, reducer, acc) do
+    {elem, user_acc} = reducer.(elem, user_acc)
+    intersperse_reduce(rest, separator, user_acc, reducer, [acc, elem, separator])
+  end
+end
diff --git a/test/test_repo.exs b/test/support/test_repo.exs
similarity index 100%
rename from test/test_repo.exs
rename to test/support/test_repo.exs
diff --git a/test/test_helper.exs b/test/test_helper.exs
index 1dc161b7..505fcd9d 100644
--- a/test/test_helper.exs
+++ b/test/test_helper.exs
@@ -4,7 +4,7 @@ Mix.shell(Mix.Shell.Process)
 System.put_env("ECTO_EDITOR", "")
 Logger.configure(level: :info)

-Code.require_file("test_repo.exs", __DIR__)
+Code.require_file("support/test_repo.exs", __DIR__)
 Code.require_file("../integration_test/support/file_helpers.exs", __DIR__)

 ExUnit.start()
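
Note on the refactor above: the private intersperse_map/3 and intersperse_map/4 helpers removed from each adapter are replaced one-for-one by Enum.map_intersperse/3, which has been part of Elixir's standard library since 1.10 and maps each element while interspersing the separator in a single pass. A minimal sketch of the equivalence, outside the patch itself; the quote_name/1 anonymous function below is illustrative only, not the adapters' actual implementation:

    # Map each field to quoted iodata and put a comma between the mapped
    # elements; the flat list renders to the same binary the removed
    # private helpers produced from their nested iodata.
    quote_name = fn field -> [?`, Atom.to_string(field), ?`] end

    iodata = Enum.map_intersperse([:id, :name, :email], ?,, quote_name)
    IO.iodata_to_binary(iodata)
    #=> "`id`,`name`,`email`"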