feat: allow configuring errors to cache instead of caching all (#28)
* feat: add tests for caching on error cases and increase test coverage

* chore: remove caching of errors and make it a tunable configuration to cache errors

* fix: deep merge changes into resolution context and plug/private to avoid overriding things

* fix: format unformatted code and fix telemetry tests causing log messages

* fix: up coverage requirements for project to 90%

* feat: add ability to whitelist specific query names when adding graphql middleware (#29)

* feat: add ability to whitelist specific query names when adding graphql middleware

* chore: add docs for whitelisting
MikaAK authored Sep 26, 2023
1 parent 90f1f38 commit d6a0e10
Showing 16 changed files with 501 additions and 141 deletions.
21 changes: 19 additions & 2 deletions README.md
@@ -164,7 +164,11 @@ For GraphQL endpoints it is possible to provide a list of atoms that will be pas
field :user, :user do
arg :id, non_null(:id)

middleware RequestCache.Middleware, ttl: :timer.seconds(60), cache: MyCacheModule, labels: [:service, :endpoint]
middleware RequestCache.Middleware,
ttl: :timer.seconds(60),
cache: MyCacheModule,
labels: [:service, :endpoint],
whitelisted_query_names: ["MyQueryName"] # By default all queries are cached; whitelisting limits caching to queries with these names from the GraphQL document

resolve &Resolvers.User.find/2
end
@@ -188,9 +192,22 @@ The events will look like this:
}
```

##### Enable Error Caching
To enable error caching, we can either set `cached_errors` in our config
or pass it as an option to `RequestCache.store` or `RequestCache.Middleware`.

The value of `cached_errors` can be `[]`, `:all`, or a list of reason atoms as
defined by `Plug.Conn.Status`, such as `:not_found` or `:internal_server_error`.

In REST this works off the response status codes returned. In GraphQL, however, reason atoms
only work if your errors contain some sort of `%{code: "not_found"}` field in the response.

Take a look at [error_message](https://github.com/MikaAK/elixir_error_message) for a compatible error system.
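
As a rough sketch of the options above (the TTL values, reason atoms, and `MyCacheModule` are illustrative choices, not prescribed by this commit), error caching could be enabled globally or per call:

```elixir
# config/config.exs — cache 404 and 500 responses for every cached request
config :request_cache_plug,
  cached_errors: [:not_found, :internal_server_error]

# Per-field via the Absinthe middleware, overriding the global setting
middleware RequestCache.Middleware,
  ttl: :timer.seconds(60),
  cache: MyCacheModule,
  cached_errors: [:not_found]

# Or per-resolver result, when calling store directly
RequestCache.store(result, ttl: :timer.seconds(60), cached_errors: :all)
```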


### Notes/Gotchas
- In order for this caching to work, we cannot use POST requests as the GraphQL convention suggests, at least not for queries; queries must be sent as GET requests. Fortunately this doesn't actually matter, since GraphQL can be served over any HTTP method (though GET does limit query size), and in a production app you may already be doing this for the caching you gain from CloudFlare (see the sketch after this list)
- Caches for GraphQL are stored via the name parameter that comes back from the query (for now), so you must name your queries to get caching
- Caches are stored under an MD5-hashed key that correlates to your query in GraphQL, or in REST to your URL path + query parameters
- Absinthe and ConCache are optional dependencies; if you don't have them you won't have access to `RequestCache.Middleware` or `RequestCache.ConCacheStore`
- If no ConCache is found, you must set `config :request_cache_module` to something else
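
As a hypothetical illustration of the GET/named-query requirement (the router module is a placeholder; the query shape follows the README example above):

```elixir
# Sketch of exercising a cacheable GraphQL query in a Plug test.
# The query is named and sent via GET so RequestCache can cache it.
query = """
query MyQueryName {
  user(id: "1") {
    id
  }
}
"""

conn =
  Plug.Test.conn(:get, "/graphql?" <> URI.encode_query(%{"query" => query}))
  |> MyAppWeb.Router.call(MyAppWeb.Router.init([]))

# The rc-cache-status header (defined in RequestCache.Plug) can be inspected
# to check whether a repeated identical request was served from the cache.
Plug.Conn.get_resp_header(conn, "rc-cache-status")
```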

2 changes: 1 addition & 1 deletion codecov.yml
@@ -2,7 +2,7 @@ coverage:
status:
project:
default:
target: 75%
target: 90%
threshold: 0%
base: auto
patch:
1 change: 1 addition & 0 deletions config/config.exs
@@ -3,6 +3,7 @@ import Config
config :request_cache_plug,
enabled?: true,
verbose?: false,
cached_errors: [],
graphql_paths: ["/graphiql", "/graphql"],
conn_priv_key: :__shared_request_cache__,
request_cache_module: RequestCache.ConCacheStore,
4 changes: 2 additions & 2 deletions coveralls.json
@@ -1,7 +1,7 @@
{
"skip_files": ["lib/application.ex"],
"skip_files": ["lib/request_cache/application.ex"],
"custom_stop_words": ["defdelegate"],
"terminal_options": {"file_column_width": 80},
"coverage_options": {"treat_no_relevant_lines_as_covered": true, "minimum_coverage": 85}
"coverage_options": {"treat_no_relevant_lines_as_covered": true, "minimum_coverage": 90}
}

8 changes: 6 additions & 2 deletions lib/request_cache.ex
@@ -3,7 +3,11 @@ defmodule RequestCache do
#{File.read!("./README.md")}
"""

@type opts :: [ttl: pos_integer, cache: module]
@type opts :: [
ttl: pos_integer,
cache: module,
cached_errors: :all | list(atom)
]

@spec store(conn :: Plug.Conn.t, opts_or_ttl :: opts | pos_integer) :: Plug.Conn.t
def store(conn, opts_or_ttl \\ [])
@@ -20,7 +24,7 @@ defmodule RequestCache do
RequestCache.Application.dependency_found?(:absinthe_plug) do
def store(result, opts_or_ttl) do
if RequestCache.Config.enabled?() do
RequestCache.ResolverMiddleware.store_result(result, opts_or_ttl)
RequestCache.Middleware.store_result(result, opts_or_ttl)
else
result
end
4 changes: 4 additions & 0 deletions lib/request_cache/config.ex
@@ -15,6 +15,10 @@ defmodule RequestCache.Config do
Application.get_env(@app, :conn_priv_key) || :__shared_request_cache__
end

def cached_errors do
Application.get_env(@app, :cached_errors) || []
end

def request_cache_module do
Application.get_env(@app, :request_cache_module) || RequestCache.ConCacheStore
end
54 changes: 46 additions & 8 deletions lib/request_cache/middleware.ex
@@ -9,6 +9,7 @@ if absinthe_loaded? do
@impl Absinthe.Middleware
def call(%Absinthe.Resolution{} = resolution, opts) when is_list(opts) do
opts = ensure_valid_ttl(opts)

enable_cache_for_resolution(resolution, opts)
end

@@ -17,17 +18,33 @@
enable_cache_for_resolution(resolution, ttl: ttl)
end

defp enable_cache_for_resolution(resolution, opts) do
defp ensure_valid_ttl(opts) do
ttl = opts[:ttl] || RequestCache.Config.default_ttl()

Keyword.put(opts, :ttl, ttl)
end

defp enable_cache_for_resolution(%Absinthe.Resolution{} = resolution, opts) do
resolution = resolve_resolver_func_middleware(resolution, opts)

if resolution.context[RequestCache.Config.conn_private_key()][:enabled?] do
if RequestCache.Config.verbose?() do
Util.verbose_log("[RequestCache.Middleware] Enabling cache for resolution")
end
Util.verbose_log("[RequestCache.Middleware] Enabling cache for resolution")

root_resolution_path_item = List.last(resolution.path)

cache_request? = !!root_resolution_path_item &&
root_resolution_path_item.schema_node.name === "RootQueryType" &&
query_name_whitelisted?(root_resolution_path_item.name, opts)

%{resolution |
value: resolution.value || opts[:value],
context: Map.update!(
resolution.context,
RequestCache.Config.conn_private_key(),
&Keyword.merge(&1, [request: opts, cache_request?: true])
&Util.deep_merge(&1,
request: opts,
cache_request?: cache_request?
)
)
}
else
@@ -37,9 +54,30 @@
end
end

defp ensure_valid_ttl(opts) do
ttl = opts[:ttl] || RequestCache.Config.default_ttl()
Keyword.put(opts, :ttl, ttl)
defp resolve_resolver_func_middleware(resolution, opts) do
if resolver_middleware?(opts) do
%{resolution | state: :resolved}
else
resolution
end
end

defp resolver_middleware?(opts), do: opts[:value]

defp query_name_whitelisted?(query_name, opts) do
is_nil(opts[:whitelisted_query_names]) or query_name in opts[:whitelisted_query_names]
end

@spec store_result(
result :: any,
opts_or_ttl :: RequestCache.opts | pos_integer
) :: {:middleware, module, RequestCache.opts}
def store_result(result, ttl) when is_integer(ttl) do
store_result(result, [ttl: ttl])
end

def store_result(result, opts) when is_list(opts) do
{:middleware, RequestCache.Middleware, Keyword.put(opts, :value, result)}
end
end
end
96 changes: 75 additions & 21 deletions lib/request_cache/plug.ex
@@ -16,8 +16,6 @@ defmodule RequestCache.Plug do
# This is compile time so we can check quicker
@graphql_paths RequestCache.Config.graphql_paths()
@request_cache_header "rc-cache-status"
@json_regex ~r/^(\[|\{)(.*|\n)*(\]|\})$/
@html_regex ~r/<!DOCTYPE\s+html>/i

def request_cache_header, do: @request_cache_header

@@ -36,13 +34,20 @@
end
end

defp call_for_api_type(%Plug.Conn{request_path: path, method: "GET", query_string: query_string} = conn, opts) when path in @graphql_paths do
defp call_for_api_type(%Plug.Conn{
request_path: path,
method: "GET",
query_string: query_string
} = conn, opts) when path in @graphql_paths do
Util.verbose_log("[RequestCache.Plug] GraphQL query detected")

maybe_return_cached_result(conn, opts, path, query_string)
end

defp call_for_api_type(%Plug.Conn{request_path: path, method: "GET"} = conn, opts) when path not in @graphql_paths do
defp call_for_api_type(%Plug.Conn{
request_path: path,
method: "GET"
} = conn, opts) when path not in @graphql_paths do
Util.verbose_log("[RequestCache.Plug] REST path detected")

cache_key = rest_cache_key(conn)
@@ -108,8 +113,8 @@
[_ | _] -> conn
[] ->
cond do
result =~ @json_regex -> Plug.Conn.put_resp_content_type(conn, "application/json")
result =~ @html_regex -> Plug.Conn.put_resp_content_type(conn, "text/html")
String.starts_with?(result, ["{", "["]) -> Plug.Conn.put_resp_content_type(conn, "application/json")
String.starts_with?(result, ["<"]) -> Plug.Conn.put_resp_content_type(conn, "text/html")

true -> conn
end
Expand All @@ -126,7 +131,7 @@ defmodule RequestCache.Plug do
Util.verbose_log("[RequestCache.Plug] Cache enabled before send, setting into cache...")
ttl = request_cache_ttl(new_conn, opts)

with :ok <- request_cache_module(new_conn, opts).put(cache_key, ttl, new_conn.resp_body) do
with :ok <- request_cache_module(new_conn, opts).put(cache_key, ttl, to_string(new_conn.resp_body)) do
Metrics.inc_cache_put(event_metadata(conn, cache_key, opts))

Util.verbose_log("[RequestCache.Plug] Successfully put #{cache_key} into cache\n#{new_conn.resp_body}")
@@ -162,12 +167,13 @@
conn_request(conn)[:labels]
end

defp enabled_for_request?(%Plug.Conn{private: private}) do
plug_present? = get_in(private, [conn_private_key(), :enabled?]) ||
get_in(private, [:absinthe, :context, conn_private_key(), :enabled?])
defp request_cache_cached_errors(conn) do
conn_request(conn)[:cached_errors] || RequestCache.Config.cached_errors()
end

marked_for_cache? = get_in(private, [conn_private_key(), :cache_request?]) ||
get_in(private, [:absinthe, :context, conn_private_key(), :cache_request?])
defp enabled_for_request?(%Plug.Conn{} = conn) do
plug_present? = !!conn_private_key_item(conn, :enabled?)
marked_for_cache? = !!conn_private_key_item(conn, :cache_request?)

if plug_present? do
Util.verbose_log("[RequestCache.Plug] Plug enabled for request")
@@ -177,34 +183,76 @@
Util.verbose_log("[RequestCache.Plug] Plug has been marked for cache")
end

plug_present? && marked_for_cache?
plug_present? && marked_for_cache? && response_error_and_cached?(conn)
end

defp response_error_and_cached?(%Plug.Conn{status: 200, request_path: path}) when path not in @graphql_paths do
true
end

defp response_error_and_cached?(%Plug.Conn{status: 200, request_path: path} = conn) when path in @graphql_paths do
empty_errors? = String.contains?(conn.resp_body, empty_errors_pattern())
no_errors? = !String.contains?(conn.resp_body, error_pattern())

empty_errors? or
no_errors? or
gql_resp_has_known_error?(request_cache_cached_errors(conn), conn.resp_body)
end

defp response_error_and_cached?(%Plug.Conn{status: status} = conn) do
cached_error_codes = request_cache_cached_errors(conn)

cached_error_codes !== [] and
(cached_error_codes === :all or Plug.Conn.Status.reason_atom(status) in cached_error_codes)
end

defp gql_resp_has_known_error?([], _resp_body), do: false
defp gql_resp_has_known_error?(:all, _resp_body), do: true

defp gql_resp_has_known_error?(cached_errors, resp_body) do
String.contains?(resp_body, error_codes_pattern(cached_errors))
end

def empty_errors_pattern, do: :binary.compile_pattern("\"errors\": []")
def error_pattern, do: :binary.compile_pattern("\"errors\":")

def error_codes_pattern(cached_errors) do
cached_errors
|> Enum.flat_map(&["code\":\"#{&1}", "code\" :\"#{&1}", "code\": \"#{&1}", "code\" : \"#{&1}"])
|> :binary.compile_pattern
end

defp conn_request(%Plug.Conn{private: private}) do
get_in(private, [conn_private_key(), :request])
|| get_in(private, [:absinthe, :context, conn_private_key(), :request])
|| []

defp conn_request(%Plug.Conn{} = conn) do
conn_private_key_item(conn, :request) || []
end

defp conn_private_key_item(%Plug.Conn{private: private}, name) do
get_in(private, [conn_private_key(), name])
|| get_in(private, [:absinthe, :context, conn_private_key(), name])
end

if RequestCache.Application.dependency_found?(:absinthe_plug) do
defp enable_request_cache_for_conn(conn) do
context = conn.private[:absinthe][:context] || %{}

conn
|> Plug.Conn.put_private(conn_private_key(), enabled?: true)
|> Absinthe.Plug.put_options(context: Map.put(context, conn_private_key(), enabled?: true))
|> deep_merge_to_private(enabled?: true)
|> Absinthe.Plug.put_options(
context: Util.deep_merge(context, %{conn_private_key() => [enabled?: true]})
)
end
else
defp enable_request_cache_for_conn(conn) do
Plug.Conn.put_private(conn, conn_private_key(), enabled?: true)
deep_merge_to_private(conn, enabled?: true)
end
end

def store_request(conn, opts) when is_list(opts) do
if conn.private[conn_private_key()][:enabled?] do
Util.verbose_log("[RequestCache.Plug] Storing REST request in #{conn_private_key()}")

Plug.Conn.put_private(conn, conn_private_key(),
deep_merge_to_private(conn,
cache_request?: true,
request: opts
)
@@ -223,6 +271,12 @@
RequestCache.Config.conn_private_key()
end

defp deep_merge_to_private(conn, params) do
(conn.private[conn_private_key()] || [])
|> Util.deep_merge(params)
|> then(&Plug.Conn.put_private(conn, conn_private_key(), &1))
end

defp log_error(error, conn, opts) do
{:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
