diff --git a/README.md b/README.md
index 0b05bc3..6b6d249 100644
--- a/README.md
+++ b/README.md
@@ -188,6 +188,19 @@ The events will look like this:
 }
 ```
 
+##### Enable Error Caching
+To enable error caching we can either set `cached_errors` in our config
+or pass it as an option to `RequestCache.store` or `RequestCache.Middleware`.
+
+The value of `cached_errors` can be `[]` (disabled), `:all`, or a list of reason atoms as
+defined by `Plug.Conn.Status`, such as `:not_found` or `:internal_server_error`.
+
+For REST this works off the response status codes returned. However, to use reason atoms with GraphQL
+you will need to make sure your errors contain some sort of `%{code: "not_found"}` field.
+
+Take a look at [error_message](https://github.com/MikaAK/elixir_error_message) for a compatible error system.
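+
+For example, error caching could be enabled in any of the following ways. This is a rough
+sketch: the `:request_cache_plug` config key and the `ttl` value here are assumptions, so
+adjust them to match your own setup.
+
+```elixir
+# config/config.exs (assuming the :request_cache_plug OTP app name)
+import Config
+
+config :request_cache_plug, cached_errors: [:not_found]
+
+# per-request, for REST endpoints
+conn
+|> RequestCache.store(ttl: :timer.hours(1), cached_errors: [:not_found])
+
+# per-field, for Absinthe resolvers
+middleware RequestCache.Middleware, cached_errors: :all
+```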
+
+
 
 ### Notes/Gotchas
 
 - In order for this caching to work, we cannot be using POST requests as specced out by GraphQL, not for queries at least, fortunately this doesn't actually matter since we can use any http method we want (there will be a limit to query size), in a production app you may be doing this already due to the caching you gain from CloudFlare
 - Caches for gql are stored via the name parameter that comes back from the query (for now) so you must name your queries to get caching
diff --git a/lib/request_cache.ex b/lib/request_cache.ex
index 29c5555..2c70131 100644
--- a/lib/request_cache.ex
+++ b/lib/request_cache.ex
@@ -3,7 +3,11 @@ defmodule RequestCache do
   #{File.read!("./README.md")}
   """
 
-  @type opts :: [ttl: pos_integer, cache: module]
+  @type opts :: [
+    ttl: pos_integer,
+    cache: module,
+    cached_errors: :all | list(atom)
+  ]
 
   @spec store(conn :: Plug.Conn.t, opts_or_ttl :: opts | pos_integer) :: Plug.Conn.t
   def store(conn, opts_or_ttl \\ [])
@@ -20,7 +24,7 @@ defmodule RequestCache do
     RequestCache.Application.dependency_found?(:absinthe_plug) do
     def store(result, opts_or_ttl) do
       if RequestCache.Config.enabled?() do
-        RequestCache.ResolverMiddleware.store_result(result, opts_or_ttl)
+        RequestCache.Middleware.store_result(result, opts_or_ttl)
       else
         result
       end
diff --git a/lib/request_cache/middleware.ex b/lib/request_cache/middleware.ex
index 74afe2a..1638667 100644
--- a/lib/request_cache/middleware.ex
+++ b/lib/request_cache/middleware.ex
@@ -9,6 +9,7 @@ if absinthe_loaded? do
     @impl Absinthe.Middleware
     def call(%Absinthe.Resolution{} = resolution, opts) when is_list(opts) do
       opts = ensure_valid_ttl(opts)
+
       enable_cache_for_resolution(resolution, opts)
     end
 
@@ -18,16 +19,19 @@ if absinthe_loaded? do
     defp enable_cache_for_resolution(resolution, opts) do
+      resolution = resolve_resolver_func_middleware(resolution, opts)
+
       if resolution.context[RequestCache.Config.conn_private_key()][:enabled?] do
         if RequestCache.Config.verbose?() do
           Util.verbose_log("[RequestCache.Middleware] Enabling cache for resolution")
         end
 
         %{resolution |
+          value: resolution.value || opts[:value],
           context: Map.update!(
             resolution.context,
             RequestCache.Config.conn_private_key(),
-            &Keyword.merge(&1, [request: opts, cache_request?: true])
+            &Util.deep_merge(&1, request: opts, cache_request?: true)
           )
         }
       else
@@ -37,9 +41,32 @@ if absinthe_loaded? do
       end
     end
 
+    defp resolve_resolver_func_middleware(resolution, opts) do
+      if resolver_middleware?(opts) do
+        %{resolution | state: :resolved}
+      else
+        resolution
+      end
+    end
+
+    defp resolver_middleware?(opts), do: opts[:value]
+
     defp ensure_valid_ttl(opts) do
       ttl = opts[:ttl] || RequestCache.Config.default_ttl()
+
       Keyword.put(opts, :ttl, ttl)
     end
+
+    @spec store_result(
+      result :: any,
+      opts_or_ttl :: RequestCache.opts | pos_integer
+    ) :: {:middleware, module, RequestCache.opts}
+    def store_result(result, ttl) when is_integer(ttl) do
+      store_result(result, [ttl: ttl])
+    end
+
+    def store_result(result, opts) when is_list(opts) do
+      {:middleware, RequestCache.Middleware, Keyword.put(opts, :value, result)}
+    end
   end
 end
diff --git a/lib/request_cache/plug.ex b/lib/request_cache/plug.ex
index cf25ae8..0e98bd5 100644
--- a/lib/request_cache/plug.ex
+++ b/lib/request_cache/plug.ex
@@ -16,8 +16,6 @@ defmodule RequestCache.Plug do
   # This is compile time so we can check quicker
   @graphql_paths RequestCache.Config.graphql_paths()
   @request_cache_header "rc-cache-status"
-  @json_regex ~r/^(\[|\{)(.*|\n)*(\]|\})$/
-  @html_regex ~r//i
 
   def request_cache_header, do: @request_cache_header
 
@@ -108,8 +106,8 @@ defmodule RequestCache.Plug do
       [_ | _] -> conn
       [] ->
         cond do
-          result =~ @json_regex -> Plug.Conn.put_resp_content_type(conn, "application/json")
-          result =~ @html_regex -> Plug.Conn.put_resp_content_type(conn, "text/html")
+          String.starts_with?(result, ["{", "["]) -> Plug.Conn.put_resp_content_type(conn, "application/json")
+          String.starts_with?(result, ["<"]) -> Plug.Conn.put_resp_content_type(conn, "text/html")
           true -> conn
         end
@@ -126,7 +124,7 @@ defmodule RequestCache.Plug do
       Util.verbose_log("[RequestCache.Plug] Cache enabled before send, setting into cache...")
 
       ttl = request_cache_ttl(new_conn, opts)
-      with :ok <- request_cache_module(new_conn, opts).put(cache_key, ttl, new_conn.resp_body) do
+      with :ok <- request_cache_module(new_conn, opts).put(cache_key, ttl, to_string(new_conn.resp_body)) do
        Metrics.inc_cache_put(event_metadata(conn, cache_key, opts))

        Util.verbose_log("[RequestCache.Plug] Successfully put #{cache_key} into cache\n#{new_conn.resp_body}")
@@ -186,26 +184,38 @@ defmodule RequestCache.Plug do
   end
 
   defp response_error_and_cached?(%Plug.Conn{status: 200, request_path: path} = conn) when path in @graphql_paths do
-    gql_resp_success_or_has_known_error?(request_cache_cached_errors(conn), conn.resp_body)
+    empty_errors? = String.contains?(conn.resp_body, empty_errors_pattern())
+    no_errors? = !String.contains?(conn.resp_body, error_pattern())
+
+    empty_errors? or
+      no_errors? or
+      gql_resp_has_known_error?(request_cache_cached_errors(conn), conn.resp_body)
   end
 
   defp response_error_and_cached?(%Plug.Conn{status: status} = conn) do
-    conn.private |> IO.inspect
-    cached_error_codes = request_cache_cached_errors(conn) |> IO.inspect
+    cached_error_codes = request_cache_cached_errors(conn)
 
     cached_error_codes !== [] and
       (cached_error_codes === :all or Plug.Conn.Status.reason_atom(status) in cached_error_codes)
   end
 
-  defp gql_resp_success_or_has_known_error?([], _resp_body), do: false
-  defp gql_resp_success_or_has_known_error?(:all, _resp_body), do: false
+  defp gql_resp_has_known_error?([], _resp_body), do: false
+  defp gql_resp_has_known_error?(:all, _resp_body), do: true
+
+  defp gql_resp_has_known_error?(cached_errors, resp_body) do
+    String.contains?(resp_body, error_codes_pattern(cached_errors))
+  end
 
-  defp gql_resp_success_or_has_known_error?(cached_errors, resp_body) do
-    empty_errors? = String.contains?(resp_body, :binary.compile_pattern("\"errors\": []"))
+  def empty_errors_pattern, do: :binary.compile_pattern("\"errors\": []")
+  def error_pattern, do: :binary.compile_pattern("\"errors\":")
 
-    empty_errors? or Enum.any?(cached_errors, &(resp_body =~ to_string(&1)))
+  def error_codes_pattern(cached_errors) do
+    cached_errors
+    |> Enum.flat_map(&["code\":\"#{&1}", "code\" :\"#{&1}", "code\": \"#{&1}", "code\" : \"#{&1}"])
+    |> :binary.compile_pattern
   end
+
   defp conn_request(%Plug.Conn{} = conn) do
     conn_private_key_item(conn, :request) || []
   end
@@ -220,12 +230,14 @@ defmodule RequestCache.Plug do
       context = conn.private[:absinthe][:context] || %{}
 
       conn
-      |> Plug.Conn.put_private(conn_private_key(), enabled?: true)
-      |> Absinthe.Plug.put_options(context: Map.put(context, conn_private_key(), enabled?: true))
+      |> deep_merge_to_private(enabled?: true)
+      |> Absinthe.Plug.put_options(
+        context: Util.deep_merge(context, %{conn_private_key() => [enabled?: true]})
+      )
     end
   else
     defp enable_request_cache_for_conn(conn) do
-      Plug.Conn.put_private(conn, conn_private_key(), enabled?: true)
+      deep_merge_to_private(conn, enabled?: true)
     end
   end
 
@@ -233,7 +245,7 @@ defmodule RequestCache.Plug do
     if conn.private[conn_private_key()][:enabled?] do
       Util.verbose_log("[RequestCache.Plug] Storing REST request in #{conn_private_key()}")
 
-      Plug.Conn.put_private(conn, conn_private_key(),
+      deep_merge_to_private(conn,
         cache_request?: true,
         request: opts
       )
@@ -252,6 +264,12 @@ defmodule RequestCache.Plug do
     RequestCache.Config.conn_private_key()
   end
 
+  defp deep_merge_to_private(conn, params) do
+    (conn.private[conn_private_key()] || [])
+    |> Util.deep_merge(params)
+    |> then(&Plug.Conn.put_private(conn, conn_private_key(), &1))
+  end
+
   defp log_error(error, conn, opts) do
     {:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
diff --git a/lib/request_cache/resolver_middleware.ex b/lib/request_cache/resolver_middleware.ex
deleted file mode 100644
index c31f27f..0000000
--- a/lib/request_cache/resolver_middleware.ex
+++ /dev/null
@@ -1,63 +0,0 @@
-absinthe_loaded? = RequestCache.Application.dependency_found?(:absinthe) and
-  RequestCache.Application.dependency_found?(:absinthe_plug)
-
-if absinthe_loaded? do
-  defmodule RequestCache.ResolverMiddleware do
-    @moduledoc false
-
-    alias RequestCache.Util
-
-    @behaviour Absinthe.Middleware
-
-    @type opts :: [ttl: pos_integer, cache: module, value: any]
-
-    @impl Absinthe.Middleware
-    def call(%Absinthe.Resolution{} = resolution, opts) do
-      if RequestCache.Config.enabled?() do
-        enable_cache_for_resolution(resolution, opts)
-      else
-        resolution
-      end
-    end
-
-    defp enable_cache_for_resolution(resolution, opts) do
-      if resolution.context[RequestCache.Config.conn_private_key()][:enabled?] do
-        config = [request: Keyword.delete(opts, :value), cache_request?: true]
-
-        resolution = %{resolution |
-          state: :resolved,
-          value: opts[:value],
-          context: Map.update!(
-            resolution.context,
-            RequestCache.Config.conn_private_key(),
-            &Keyword.merge(&1, config)
-          )
-        }
-
-        Util.verbose_log("[RequestCache.ResolverMiddleware] Enabling cache for resolution")
-
-        resolution
-      else
-        Util.log_cache_disabled_message()
-
-        %{
-          resolution |
-          state: :resolved,
-          value: opts[:value],
-        }
-      end
-    end
-
-    @spec store_result(
-      result :: any,
-      opts_or_ttl :: opts | pos_integer
-    ) :: {:middleware, module, RequestCache.ResolverMiddleware.opts}
-    def store_result(result, ttl) when is_integer(ttl) do
-      store_result(result, [ttl: ttl])
-    end
-
-    def store_result(result, opts) when is_list(opts) do
-      {:middleware, RequestCache.ResolverMiddleware, Keyword.put(opts, :value, result)}
-    end
-  end
-end
diff --git a/lib/request_cache/util.ex b/lib/request_cache/util.ex
index eaebd0a..c241251 100644
--- a/lib/request_cache/util.ex
+++ b/lib/request_cache/util.ex
@@ -3,6 +3,9 @@ defmodule RequestCache.Util do
   @moduledoc false
 
+  @whitelisted_modules [DateTime, NaiveDateTime, Date, Time, File.Stat, MapSet, Regex, URI, Version]
+
+
   # def parse_gql_name(query_string) do
   #   case Regex.run(~r/^(?:query) ([^\({]+(?=\(|{))/, query_string, capture: :all_but_first) do
   #     [query_name] -> String.trim(query_name)
@@ -27,4 +30,22 @@ defmodule RequestCache.Util do
       Logger.debug(message)
     end
   end
+
+  def deep_merge(list_a, list_b) when is_list(list_a) and is_list(list_b) do
+    Keyword.merge(list_a, list_b, fn
+      _k, _, %struct{} = right when struct in @whitelisted_modules -> right
+      _k, left, right when is_map(left) and is_map(right) -> deep_merge(left, right)
+      _k, left, right when is_list(left) and is_list(right) -> deep_merge(left, right)
+      _, _, right -> right
+    end)
+  end
+
+  def deep_merge(map_a, map_b) do
+    Map.merge(map_a, map_b, fn
+      _k, _, %struct{} = right when struct in @whitelisted_modules -> right
+      _k, left, right when is_map(left) and is_map(right) -> deep_merge(left, right)
+      _k, left, right when is_list(left) and is_list(right) -> deep_merge(left, right)
+      _, _, right -> right
+    end)
+  end
 end
diff --git a/test/request_cache/con_cache_store_test.exs b/test/request_cache/con_cache_store_test.exs
index 806fc8b..cd5bd7e 100644
--- a/test/request_cache/con_cache_store_test.exs
+++ b/test/request_cache/con_cache_store_test.exs
@@ -39,7 +39,7 @@ defmodule RequestCache.ConCacheStoreTest do
   test "starts up properly" do
     pid_name = :"test_#{Enum.random(1..100_000_000)}"
 
-    start_link_supervised!(RequestCache.ConCacheStore.child_spec(name: pid_name))
+    start_supervised!(RequestCache.ConCacheStore.child_spec(name: pid_name))
 
     assert pid_name |> Process.whereis |> Process.alive?
   end
diff --git a/test/request_cache/util_test.exs b/test/request_cache/util_test.exs
new file mode 100644
index 0000000..1bf68cb
--- /dev/null
+++ b/test/request_cache/util_test.exs
@@ -0,0 +1,44 @@
+defmodule RequestCache.UtilTest do
+  @moduledoc false
+
+  use ExUnit.Case, async: true
+
+  describe "&deep_merge/2" do
+    test "deep merges keywords with nested maps and keywords properly" do
+      date_time_a = DateTime.utc_now()
+      date_time_b = DateTime.add(DateTime.utc_now(), 100)
+
+      assert [
+        apple: %{
+          a: 2,
+          b: 3,
+          c: 4,
+          date: date_time_b
+        },
+
+        banana: [a: 1, b: 2],
+      ] === RequestCache.Util.deep_merge(
+        [apple: %{a: 1, b: 3, date: date_time_a}, banana: [a: 1]],
+        [apple: %{a: 2, c: 4, date: date_time_b}, banana: [b: 2]]
+      )
+    end
+
+    test "deep merges maps with nested keywords and maps properly" do
+      date_time_a = DateTime.utc_now()
+      date_time_b = DateTime.add(DateTime.utc_now(), 100)
+
+      assert %{
+        banana: %{a: 1, b: 2},
+        apple: [
+          b: 3,
+          a: 2,
+          c: 4,
+          date: date_time_b
+        ]
+      } === RequestCache.Util.deep_merge(
+        %{apple: [a: 1, b: 3, date: date_time_a], banana: %{a: 1}},
+        %{apple: [a: 2, c: 4, date: date_time_b], banana: %{b: 2}}
+      )
+    end
+  end
+end
diff --git a/test/request_cache_absinthe_test.exs b/test/request_cache_absinthe_test.exs
index 78c124e..976e783 100644
--- a/test/request_cache_absinthe_test.exs
+++ b/test/request_cache_absinthe_test.exs
@@ -39,6 +39,36 @@ defmodule RequestCacheAbsintheTest do
           {:ok, "HelloError"}
         end
       end
+
+      field :uncached_error, :string do
+        middleware RequestCache.Middleware, cached_errors: []
+
+        resolve fn _, %{context: %{call_pid: pid}} ->
+          EnsureCalledOnlyOnce.call(pid)
+
+          {:error, %{code: :not_found, message: "TesT"}}
+        end
+      end
+
+      field :cached_all_error, :string do
+        arg :code, non_null(:string)
+
+        middleware RequestCache.Middleware, cached_errors: :all
+
+        resolve fn %{code: code}, %{context: %{call_pid: pid}} ->
+          EnsureCalledOnlyOnce.call(pid)
+          {:error, %{code: code, message: "TesT"}}
+        end
+      end
+
+      field :cached_not_found_error, :string do
+        middleware RequestCache.Middleware, cached_errors: [:not_found]
+
+        resolve fn _, %{context: %{call_pid: pid}} ->
+          EnsureCalledOnlyOnce.call(pid)
+          {:error, %{code: :not_found, message: "TesT"}}
+        end
+      end
     end
   end
@@ -75,6 +105,9 @@ defmodule RequestCacheAbsintheTest do
   @query_2 "query Hello2 { helloWorld }"
   @query_error "query HelloError { helloError }"
   @uncached_query "query HelloUncached { uncachedHello }"
+  @uncached_error_query "query UncachedFound { uncachedError }"
+  @cached_all_error_query "query CachedAllFound { cachedAllError(code: \"not_found\") }"
+  @cached_not_found_error_query "query CachedNotFound { cachedNotFoundError }"
 
   setup do
     {:ok, pid} = EnsureCalledOnlyOnce.start_link()
@@ -102,16 +135,16 @@
   end
 
   @tag capture_log: true
-  test "does not errors when error caching not enabled", %{call_pid: pid} do
+  test "does not cache errors when error caching not enabled", %{call_pid: pid} do
     assert %Plug.Conn{} = :get
-      |> conn(graphql_url(@uncached_query))
+      |> conn(graphql_url(@uncached_error_query))
      |> RequestCache.Support.Utils.ensure_default_opts()
      |> Absinthe.Plug.put_options(context: %{call_pid: pid})
      |> Router.call([])
 
     assert_raise Plug.Conn.WrapperError, fn ->
       conn = :get
-      |> conn(graphql_url(@uncached_query))
+      |> conn(graphql_url(@uncached_error_query))
      |> RequestCache.Support.Utils.ensure_default_opts()
      |> Absinthe.Plug.put_options(context: %{call_pid: pid})
      |> Router.call([])
@@ -121,22 +154,35 @@ defmodule RequestCacheAbsintheTest do
   end
 
   @tag capture_log: true
-  test "caches errors when error caching enabled", %{call_pid: pid} do
+  test "caches errors when error caching set to :all", %{call_pid: pid} do
     assert %Plug.Conn{} = :get
-      |> conn(graphql_url(@uncached_query))
-      |> RequestCache.Support.Utils.ensure_default_opts()
+      |> conn(graphql_url(@cached_all_error_query))
+      |> RequestCache.Support.Utils.ensure_default_opts(request: [cached_errors: :all])
      |> Absinthe.Plug.put_options(context: %{call_pid: pid})
      |> Router.call([])
 
-    assert_raise Plug.Conn.WrapperError, fn ->
-      conn = :get
-      |> conn(graphql_url(@uncached_query))
-      |> RequestCache.Support.Utils.ensure_default_opts()
+    assert ["HIT"] = :get
+      |> conn(graphql_url(@cached_all_error_query))
+      |> RequestCache.Support.Utils.ensure_default_opts(request: [cached_errors: :all])
+      |> Absinthe.Plug.put_options(context: %{call_pid: pid})
+      |> Router.call([])
+      |> get_resp_header(RequestCache.Plug.request_cache_header())
+  end
+
+  @tag capture_log: true
+  test "caches errors when error caching set to [:not_found]", %{call_pid: pid} do
+    assert %Plug.Conn{} = :get
+      |> conn(graphql_url(@cached_not_found_error_query))
+      |> RequestCache.Support.Utils.ensure_default_opts(request: [cached_errors: [:not_found]])
      |> Absinthe.Plug.put_options(context: %{call_pid: pid})
      |> Router.call([])
 
-      assert [] === get_resp_header(conn, RequestCache.Plug.request_cache_header())
-    end
+    assert ["HIT"] = :get
+      |> conn(graphql_url(@cached_not_found_error_query))
+      |> RequestCache.Support.Utils.ensure_default_opts(request: [cached_errors: [:not_found]])
+      |> Absinthe.Plug.put_options(context: %{call_pid: pid})
+      |> Router.call([])
+      |> get_resp_header(RequestCache.Plug.request_cache_header())
   end
 
   @tag capture_log: true