Commit
fix: deep merge changes into resolution context and plug/private to avoid overriding things
MikaAK committed Sep 14, 2023
1 parent 30ab229 commit 4b44628
Showing 5 changed files with 58 additions and 15 deletions.
11 changes: 11 additions & 0 deletions README.md
@@ -188,6 +188,17 @@ The events will look like this:
}
```

##### Enable Error Caching
In order to enable error caching we can either set `cached_errors` in our config
or pass it as an option to `RequestCache.store` or `RequestCache.Middleware`.

The value of `cached_errors` can be one of `[]`, `:all`, or a list of reason atoms as
defined by `Plug.Conn.Status`, such as `:not_found` or `:internal_server_error`.

For REST this works off the response status codes returned. To use reason atoms with GraphQL,
however, you will need to make sure your rendered errors contain some sort of `%{code: "not_found"}`
entry, as in the sketch below.
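
As a rough sketch of both styles (the `:request_cache_plug` config key, the `Users.find/1` lookup, and the
exact error-map shape are illustrative assumptions — the plug only needs the rendered JSON to contain
something like `"code": "not_found"`):

```elixir
# config/config.exs — cache 404-style error responses for every request
config :request_cache_plug,
  cached_errors: [:not_found]
```

```elixir
# Or per-field in an Absinthe schema, passed as a middleware option
field :user, :user do
  arg :id, non_null(:id)

  middleware RequestCache.Middleware, ttl: :timer.hours(1), cached_errors: [:not_found]

  resolve fn %{id: id}, _resolution ->
    case Users.find(id) do
      # The "code" entry is what lets the GraphQL error match a cached error reason
      nil -> {:error, %{message: "User not found", code: "not_found"}}
      user -> {:ok, user}
    end
  end
end
```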


### Notes/Gotchas
- In order for this caching to work, we cannot use POST requests for GraphQL queries as the spec suggests. Fortunately this doesn't actually matter, since we can use any HTTP method we want (there will be a limit to query size), and in a production app you may be doing this already due to the caching you gain from CloudFlare
- Caches for GraphQL are stored under the operation name that comes back with the query (for now), so you must name your queries to get caching (see the sketch after this list)
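
For example, an illustrative client call (the `Req` library and the URL here are assumptions, not part of
this package) — the operation name `CachedUser` is what the cache entry is keyed on, and the query travels
over GET so the plug can intercept and cache it:

```elixir
Req.get!("https://api.example.com/graphql",
  params: [query: "query CachedUser { user(id: \"1\") { id name } }"]
)
```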
3 changes: 2 additions & 1 deletion lib/request_cache/middleware.ex
@@ -27,7 +27,7 @@ if absinthe_loaded? do
context: Map.update!(
resolution.context,
RequestCache.Config.conn_private_key(),
&Keyword.merge(&1, [request: opts, cache_request?: true])
&Util.deep_merge(&1, request: opts, cache_request?: true)
)
}
else
@@ -39,6 +39,7 @@ if absinthe_loaded? do

defp ensure_valid_ttl(opts) do
ttl = opts[:ttl] || RequestCache.Config.default_ttl()

Keyword.put(opts, :ttl, ttl)
end
end
36 changes: 23 additions & 13 deletions lib/request_cache/plug.ex
@@ -186,24 +186,26 @@ defmodule RequestCache.Plug do
end

defp response_error_and_cached?(%Plug.Conn{status: 200, request_path: path} = conn) when path in @graphql_paths do
gql_resp_success_or_has_known_error?(request_cache_cached_errors(conn), conn.resp_body)
empty_errors? = String.contains?(conn.resp_body, :binary.compile_pattern("\"errors\": []"))
no_errors? = !String.contains?(conn.resp_body, :binary.compile_pattern("\"errors\":"))

empty_errors? or
no_errors? or
gql_resp_has_known_error?(request_cache_cached_errors(conn), conn.resp_body)
end

defp response_error_and_cached?(%Plug.Conn{status: status} = conn) do
conn.private |> IO.inspect
cached_error_codes = request_cache_cached_errors(conn) |> IO.inspect
cached_error_codes = request_cache_cached_errors(conn)

cached_error_codes !== [] and
(cached_error_codes === :all or Plug.Conn.Status.reason_atom(status) in cached_error_codes)
end

defp gql_resp_success_or_has_known_error?([], _resp_body), do: false
defp gql_resp_success_or_has_known_error?(:all, _resp_body), do: false

defp gql_resp_success_or_has_known_error?(cached_errors, resp_body) do
empty_errors? = String.contains?(resp_body, :binary.compile_pattern("\"errors\": []"))
defp gql_resp_has_known_error?([], _resp_body), do: false
defp gql_resp_has_known_error?(:all, _resp_body), do: false

empty_errors? or Enum.any?(cached_errors, &(resp_body =~ to_string(&1)))
defp gql_resp_has_known_error?(cached_errors, resp_body) do
Enum.any?(cached_errors, &(resp_body =~ ~r/"code" ?: ?"#{&1}"/))
end

defp conn_request(%Plug.Conn{} = conn) do
@@ -220,20 +222,22 @@
context = conn.private[:absinthe][:context] || %{}

conn
|> Plug.Conn.put_private(conn_private_key(), enabled?: true)
|> Absinthe.Plug.put_options(context: Map.put(context, conn_private_key(), enabled?: true))
|> deep_merge_to_private(enabled?: true)
|> Absinthe.Plug.put_options(
context: Util.deep_merge(context, %{conn_private_key() => [enabled?: true]})
)
end
else
defp enable_request_cache_for_conn(conn) do
Plug.Conn.put_private(conn, conn_private_key(), enabled?: true)
deep_merge_to_private(conn, enabled?: true)
end
end

def store_request(conn, opts) when is_list(opts) do
if conn.private[conn_private_key()][:enabled?] do
Util.verbose_log("[RequestCache.Plug] Storing REST request in #{conn_private_key()}")

Plug.Conn.put_private(conn, conn_private_key(),
deep_merge_to_private(conn,
cache_request?: true,
request: opts
)
@@ -252,6 +256,12 @@
RequestCache.Config.conn_private_key()
end

defp deep_merge_to_private(conn, params) do
conn.private[conn_private_key()]
|> Util.deep_merge(params)
|> then(&Plug.Conn.put_private(conn, conn_private_key(), &1))
end

defp log_error(error, conn, opts) do
{:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)

2 changes: 1 addition & 1 deletion lib/request_cache/resolver_middleware.ex
@@ -30,7 +30,7 @@ if absinthe_loaded? do
context: Map.update!(
resolution.context,
RequestCache.Config.conn_private_key(),
&Keyword.merge(&1, config)
&Util.deep_merge(&1, config)
)
}

21 changes: 21 additions & 0 deletions lib/request_cache/util.ex
@@ -3,6 +3,9 @@ defmodule RequestCache.Util do

@moduledoc false

@whitelisted_modules [DateTime, NaiveDateTime, Date, Time, File.Stat, MapSet, Regex, URI, Version]


# def parse_gql_name(query_string) do
# case Regex.run(~r/^(?:query) ([^\({]+(?=\(|{))/, query_string, capture: :all_but_first) do
# [query_name] -> String.trim(query_name)
@@ -27,4 +30,22 @@
Logger.debug(message)
end
end

def deep_merge(list_a, list_b) when is_list(list_a) and is_list(list_b) do
Keyword.merge(list_a, list_b, fn
_k, _, %struct{} = right when struct in @whitelisted_modules -> right
_k, left, right when is_map(left) and is_map(right) -> deep_merge(left, right)
_k, left, right when is_list(left) and is_list(right) -> deep_merge(left, right)
_, _, right -> right
end)
end

def deep_merge(map_a, map_b) do
Map.merge(map_a, map_b, fn
_k, _, %struct{} = right when struct in @whitelisted_modules -> right
_k, left, right when is_map(left) and is_map(right) -> deep_merge(left, right)
_k, left, right when is_list(left) and is_list(right) -> deep_merge(left, right)
_, _, right -> right
end)
end
end
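
Not part of the diff, but a quick illustration of why the merged options now go through `deep_merge/2`
instead of `Keyword.merge/2` (the option values below are made up):

```elixir
existing = [enabled?: true, request: [ttl: 60_000]]
incoming = [request: [cache_request?: true]]

# A plain Keyword.merge/2 replaces the whole :request value, dropping :ttl
Keyword.merge(existing, incoming)
#=> [enabled?: true, request: [cache_request?: true]]

# deep_merge/2 recurses into nested keyword lists/maps, so :ttl survives
RequestCache.Util.deep_merge(existing, incoming)
#=> [enabled?: true, request: [ttl: 60_000, cache_request?: true]]
```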
