Commit
fix: deep merge changes into resolution context and plug/private to avoid overriding things
MikaAK committed Sep 14, 2023
1 parent 30ab229 commit 39fa31c
Showing 8 changed files with 150 additions and 92 deletions.
13 changes: 13 additions & 0 deletions README.md
@@ -188,6 +188,19 @@ The events will look like this:
}
```

##### Enable Error Caching
To enable error caching, we can either set `cached_errors` in our config
or pass it as an option to `RequestCache.store` or `RequestCache.Middleware`.

The value of `cached_errors` can be `[]`, `:all`, or a list of reason atoms as
defined by `Plug.Conn.Status`, such as `:not_found` or `:internal_server_error`.

For REST this works off the response status codes returned. To use reason atoms with GraphQL,
however, you will need to make sure your errors contain some sort of `%{code: "not_found"}` entry.

Take a look at [error_message](https://github.com/MikaAK/elixir_error_message) for a compatible error system.
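
For example (a sketch only — the `:request_cache_plug` config key and the exact places these options go are assumptions, not taken from this commit):

```elixir
# config/config.exs — cache error responses globally for these reason atoms
config :request_cache_plug, cached_errors: [:not_found, :internal_server_error]

# in an Absinthe schema field — per-field middleware option
middleware RequestCache.Middleware, cached_errors: :all

# in a REST controller/plug — per-request option passed to store/2
conn = RequestCache.store(conn, ttl: :timer.hours(1), cached_errors: [:not_found])
```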


### Notes/Gotchas
- For this caching to work, we cannot send queries as POST requests (as the GraphQL spec recommends). Fortunately this doesn't actually matter, since we can use any HTTP method we want (GET does impose a limit on query size); in a production app you may be doing this already because of the caching you gain from CloudFlare.
- GraphQL caches are stored under the name of the query (for now), so you must name your queries to get caching (see the sketch below).
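
A minimal sketch of both points (illustrative only; the URL shape is an assumption):

```elixir
# Queries are sent via GET (not POST) and must be named — the cache key is
# derived from the query name, so an anonymous query is never cached.
named_query = "query Hello { helloWorld }"   # cacheable
anonymous_query = "{ helloWorld }"           # not cacheable

url = "/graphql?" <> URI.encode_query(%{query: named_query})
```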
8 changes: 6 additions & 2 deletions lib/request_cache.ex
@@ -3,7 +3,11 @@ defmodule RequestCache do
#{File.read!("./README.md")}
"""

@type opts :: [ttl: pos_integer, cache: module]
@type opts :: [
ttl: pos_integer,
cache: module,
cached_errors: :all | list(atom)
]

@spec store(conn :: Plug.Conn.t, opts_or_ttl :: opts | pos_integer) :: Plug.Conn.t
def store(conn, opts_or_ttl \\ [])
@@ -20,7 +24,7 @@ defmodule RequestCache do
RequestCache.Application.dependency_found?(:absinthe_plug) do
def store(result, opts_or_ttl) do
if RequestCache.Config.enabled?() do
RequestCache.ResolverMiddleware.store_result(result, opts_or_ttl)
RequestCache.Middleware.store_result(result, opts_or_ttl)
else
result
end
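
For reference, the GraphQL branch of `store/2` above delegates to `RequestCache.Middleware.store_result/2`, which returns an Absinthe middleware tuple. A resolver might opt into caching roughly like this (a sketch; the field name and resolved value are made up):

```elixir
field :hello_world, :string do
  resolve fn _args, _resolution ->
    # store/2 returns {:middleware, RequestCache.Middleware, opts}, so Absinthe
    # re-runs the middleware with the resolved value and the caching options
    RequestCache.store("Hello world", ttl: :timer.hours(1), cached_errors: [:not_found])
  end
end
```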
29 changes: 28 additions & 1 deletion lib/request_cache/middleware.ex
@@ -9,6 +9,7 @@ if absinthe_loaded? do
@impl Absinthe.Middleware
def call(%Absinthe.Resolution{} = resolution, opts) when is_list(opts) do
opts = ensure_valid_ttl(opts)

enable_cache_for_resolution(resolution, opts)
end

@@ -18,16 +19,19 @@ if absinthe_loaded? do
end

defp enable_cache_for_resolution(resolution, opts) do
resolution = resolve_resolver_func_middleware(resolution, opts)

if resolution.context[RequestCache.Config.conn_private_key()][:enabled?] do
if RequestCache.Config.verbose?() do
Util.verbose_log("[RequestCache.Middleware] Enabling cache for resolution")
end

%{resolution |
value: resolution.value || opts[:value],
context: Map.update!(
resolution.context,
RequestCache.Config.conn_private_key(),
&Keyword.merge(&1, [request: opts, cache_request?: true])
&Util.deep_merge(&1, request: opts, cache_request?: true)
)
}
else
@@ -37,9 +41,32 @@
end
end

defp resolve_resolver_func_middleware(resolution, opts) do
if resolver_middleware?(opts) do
%{resolution | state: :resolved}
else
resolution
end
end

defp resolver_middleware?(opts), do: opts[:value]

defp ensure_valid_ttl(opts) do
ttl = opts[:ttl] || RequestCache.Config.default_ttl()

Keyword.put(opts, :ttl, ttl)
end

@spec store_result(
result :: any,
opts_or_ttl :: RequestCache.opts | pos_integer
) :: {:middleware, module, RequestCache.opts}
def store_result(result, ttl) when is_integer(ttl) do
store_result(result, [ttl: ttl])
end

def store_result(result, opts) when is_list(opts) do
{:middleware, RequestCache.Middleware, Keyword.put(opts, :value, result)}
end
end
end
36 changes: 23 additions & 13 deletions lib/request_cache/plug.ex
@@ -186,24 +186,26 @@ defmodule RequestCache.Plug do
end

defp response_error_and_cached?(%Plug.Conn{status: 200, request_path: path} = conn) when path in @graphql_paths do
gql_resp_success_or_has_known_error?(request_cache_cached_errors(conn), conn.resp_body)
empty_errors? = String.contains?(conn.resp_body, :binary.compile_pattern("\"errors\": []"))
no_errors? = !String.contains?(conn.resp_body, :binary.compile_pattern("\"errors\":"))

empty_errors? or
no_errors? or
gql_resp_has_known_error?(request_cache_cached_errors(conn), conn.resp_body)
end

defp response_error_and_cached?(%Plug.Conn{status: status} = conn) do
conn.private |> IO.inspect
cached_error_codes = request_cache_cached_errors(conn) |> IO.inspect
cached_error_codes = request_cache_cached_errors(conn)

cached_error_codes !== [] and
(cached_error_codes === :all or Plug.Conn.Status.reason_atom(status) in cached_error_codes)
end

defp gql_resp_success_or_has_known_error?([], _resp_body), do: false
defp gql_resp_success_or_has_known_error?(:all, _resp_body), do: false

defp gql_resp_success_or_has_known_error?(cached_errors, resp_body) do
empty_errors? = String.contains?(resp_body, :binary.compile_pattern("\"errors\": []"))
defp gql_resp_has_known_error?([], _resp_body), do: false
defp gql_resp_has_known_error?(:all, _resp_body), do: true

empty_errors? or Enum.any?(cached_errors, &(resp_body =~ to_string(&1)))
defp gql_resp_has_known_error?(cached_errors, resp_body) do
Enum.any?(cached_errors, &(resp_body =~ ~r/"code" ?: ?"#{&1}"/))
end

defp conn_request(%Plug.Conn{} = conn) do
@@ -220,20 +222,22 @@
context = conn.private[:absinthe][:context] || %{}

conn
|> Plug.Conn.put_private(conn_private_key(), enabled?: true)
|> Absinthe.Plug.put_options(context: Map.put(context, conn_private_key(), enabled?: true))
|> deep_merge_to_private(enabled?: true)
|> Absinthe.Plug.put_options(
context: Util.deep_merge(context, %{conn_private_key() => [enabled?: true]})
)
end
else
defp enable_request_cache_for_conn(conn) do
Plug.Conn.put_private(conn, conn_private_key(), enabled?: true)
deep_merge_to_private(conn, enabled?: true)
end
end

def store_request(conn, opts) when is_list(opts) do
if conn.private[conn_private_key()][:enabled?] do
Util.verbose_log("[RequestCache.Plug] Storing REST request in #{conn_private_key()}")

Plug.Conn.put_private(conn, conn_private_key(),
deep_merge_to_private(conn,
cache_request?: true,
request: opts
)
@@ -252,6 +256,12 @@
RequestCache.Config.conn_private_key()
end

defp deep_merge_to_private(conn, params) do
conn.private[conn_private_key()]
|> Util.deep_merge(params)
|> then(&Plug.Conn.put_private(conn, conn_private_key(), &1))
end

defp log_error(error, conn, opts) do
{:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)

63 changes: 0 additions & 63 deletions lib/request_cache/resolver_middleware.ex

This file was deleted.

21 changes: 21 additions & 0 deletions lib/request_cache/util.ex
Expand Up @@ -3,6 +3,9 @@ defmodule RequestCache.Util do

@moduledoc false

@whitelisted_modules [DateTime, NaiveDateTime, Date, Time, File.Stat, MapSet, Regex, URI, Version]


# def parse_gql_name(query_string) do
# case Regex.run(~r/^(?:query) ([^\({]+(?=\(|{))/, query_string, capture: :all_but_first) do
# [query_name] -> String.trim(query_name)
@@ -27,4 +30,22 @@
Logger.debug(message)
end
end

def deep_merge(list_a, list_b) when is_list(list_a) and is_list(list_b) do
Keyword.merge(list_a, list_b, fn
_k, _, %struct{} = right when struct in @whitelisted_modules -> right
_k, left, right when is_map(left) and is_map(right) -> deep_merge(left, right)
_k, left, right when is_list(left) and is_list(right) -> deep_merge(left, right)
_, _, right -> right
end)
end

def deep_merge(map_a, map_b) do
Map.merge(map_a, map_b, fn
_k, _, %struct{} = right when struct in @whitelisted_modules -> right
_k, left, right when is_map(left) and is_map(right) -> deep_merge(left, right)
_k, left, right when is_list(left) and is_list(right) -> deep_merge(left, right)
_, _, right -> right
end)
end
end
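
For context, the new `Util.deep_merge/2` merges nested keyword lists and maps recursively, with the right-hand side winning for plain values and whitelisted structs being replaced wholesale. An illustrative call (not part of this diff; ordering follows `Keyword.merge/3`):

```elixir
iex> RequestCache.Util.deep_merge(
...>   [enabled?: true, request: [ttl: 100]],
...>   request: [cached_errors: :all], cache_request?: true
...> )
[enabled?: true, request: [ttl: 100, cached_errors: :all], cache_request?: true]
```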
2 changes: 1 addition & 1 deletion test/request_cache/con_cache_store_test.exs
@@ -39,7 +39,7 @@ defmodule RequestCache.ConCacheStoreTest do
test "starts up properly" do
pid_name = :"test_#{Enum.random(1..100_000_000)}"

start_link_supervised!(RequestCache.ConCacheStore.child_spec(name: pid_name))
start_supervised!(RequestCache.ConCacheStore.child_spec(name: pid_name))

assert pid_name |> Process.whereis |> Process.alive?
end
70 changes: 58 additions & 12 deletions test/request_cache_absinthe_test.exs
@@ -39,6 +39,36 @@ defmodule RequestCacheAbsintheTest do
{:ok, "HelloError"}
end
end

field :uncached_error, :string do
middleware RequestCache.Middleware, cached_errors: []

resolve fn _, %{context: %{call_pid: pid}} ->
EnsureCalledOnlyOnce.call(pid)

{:error, %{code: :not_found, message: "TesT"}}
end
end

field :cached_all_error, :string do
arg :code, non_null(:string)

middleware RequestCache.Middleware, cached_errors: :all

resolve fn %{code: code}, %{context: %{call_pid: pid}} ->
EnsureCalledOnlyOnce.call(pid)
{:error, %{code: code, message: "TesT"}}
end
end

field :cached_not_found_error, :string do
middleware RequestCache.Middleware, cached_errors: [:not_found]

resolve fn _, %{context: %{call_pid: pid}} ->
EnsureCalledOnlyOnce.call(pid)
{:error, %{code: :not_found, message: "TesT"}}
end
end
end
end

@@ -75,6 +105,9 @@ defmodule RequestCacheAbsintheTest do
@query_2 "query Hello2 { helloWorld }"
@query_error "query HelloError { helloError }"
@uncached_query "query HelloUncached { uncachedHello }"
@uncached_error_query "query UncachedFound { uncachedError }"
@cached_all_error_query "query CachedAllFound { cachedAllError(code: \"not_found\") }"
@cached_not_found_error_query "query CachedNotFound { cachedNotFoundError }"

setup do
{:ok, pid} = EnsureCalledOnlyOnce.start_link()
@@ -102,16 +135,16 @@
end

@tag capture_log: true
test "does not errors when error caching not enabled", %{call_pid: pid} do
test "does not cache errors when error caching not enabled", %{call_pid: pid} do
assert %Plug.Conn{} = :get
|> conn(graphql_url(@uncached_query))
|> conn(graphql_url(@uncached_error_query))
|> RequestCache.Support.Utils.ensure_default_opts()
|> Absinthe.Plug.put_options(context: %{call_pid: pid})
|> Router.call([])

assert_raise Plug.Conn.WrapperError, fn ->
conn = :get
|> conn(graphql_url(@uncached_query))
|> conn(graphql_url(@uncached_error_query))
|> RequestCache.Support.Utils.ensure_default_opts()
|> Absinthe.Plug.put_options(context: %{call_pid: pid})
|> Router.call([])
@@ -121,22 +154,35 @@
end

@tag capture_log: true
test "caches errors when error caching enabled", %{call_pid: pid} do
test "caches errors when error caching set to :all", %{call_pid: pid} do
assert %Plug.Conn{} = :get
|> conn(graphql_url(@uncached_query))
|> RequestCache.Support.Utils.ensure_default_opts()
|> conn(graphql_url(@cached_all_error_query))
|> RequestCache.Support.Utils.ensure_default_opts(request: [cached_errors: :all])
|> Absinthe.Plug.put_options(context: %{call_pid: pid})
|> Router.call([])

assert_raise Plug.Conn.WrapperError, fn ->
conn = :get
|> conn(graphql_url(@uncached_query))
|> RequestCache.Support.Utils.ensure_default_opts()
assert ["HIT"] = :get
|> conn(graphql_url(@cached_all_error_query))
|> RequestCache.Support.Utils.ensure_default_opts(request: [cached_errors: :all])
|> Absinthe.Plug.put_options(context: %{call_pid: pid})
|> Router.call([])
|> get_resp_header(RequestCache.Plug.request_cache_header())
end

@tag capture_log: true
test "caches errors when error caching set to [:not_found]", %{call_pid: pid} do
assert %Plug.Conn{} = :get
|> conn(graphql_url(@cached_not_found_error_query))
|> RequestCache.Support.Utils.ensure_default_opts(request: [cached_errors: [:not_found]])
|> Absinthe.Plug.put_options(context: %{call_pid: pid})
|> Router.call([])

assert [] === get_resp_header(conn, RequestCache.Plug.request_cache_header())
end
assert ["HIT"] = :get
|> conn(graphql_url(@cached_not_found_error_query))
|> RequestCache.Support.Utils.ensure_default_opts(request: [cached_errors: [:not_found]])
|> Absinthe.Plug.put_options(context: %{call_pid: pid})
|> Router.call([])
|> get_resp_header(RequestCache.Plug.request_cache_header())
end

@tag capture_log: true
