
Commit 0baa069

Merge pull request #226 from MITLibraries/use-32-caching
Adds caching for TIMDEX API queries
2 parents: a86ecf7 + 9b1e897

2 files changed (+36, -6 lines)

app/controllers/search_controller.rb (31 additions, 5 deletions)

@@ -21,11 +21,33 @@ def results
     # hand off enhanced query to builder
     query = QueryBuilder.new(@enhanced_query).query

+    # Create cache key for this query.
+    # Sort the query hash to ensure consistent key generation regardless of parameter order.
+    sorted_query = query.sort_by { |k, v| k.to_sym }.to_h
+    cache_key = Digest::MD5.hexdigest(sorted_query.to_s)
+
     # builder hands off to wrapper which returns raw results here
+    # We use two different caches so that Geo and USE queries are cached separately. This ensures we don't have
+    # cache key collisions between these two query types. In practice, the likelihood of a collision is low, as the
+    # query parameters differ for each type and they won't often be run against the same cache backend other than
+    # locally, but this is a safeguard.
+    # The response type is a GraphQL::Client::Response, which is not directly serializable, so we convert it to a hash.
     response = if Flipflop.enabled?(:gdt)
-                 execute_geospatial_query(query)
+                 Rails.cache.fetch("#{cache_key}/geo", expires_in: 12.hours) do
+                   raw = execute_geospatial_query(query)
+                   {
+                     data: raw.data.to_h,
+                     errors: raw.errors.details.to_h
+                   }
+                 end
               else
-                 TimdexBase::Client.query(TimdexSearch::BaseQuery, variables: query)
+                 Rails.cache.fetch("#{cache_key}/use", expires_in: 12.hours) do
+                   raw = TimdexBase::Client.query(TimdexSearch::BaseQuery, variables: query)
+                   {
+                     data: raw.data.to_h,
+                     errors: raw.errors.details.to_h
+                   }
+                 end
               end

     # Handle errors

@@ -59,11 +81,13 @@ def execute_geospatial_query(query)
   end

   def extract_errors(response)
-    response&.errors&.details&.to_h&.dig('data')
+    response[:errors]['data'] if response.is_a?(Hash) && response.key?(:errors) && response[:errors].key?('data')
   end

   def extract_filters(response)
-    aggs = response&.data&.search&.to_h&.dig('aggregations')
+    return unless response.is_a?(Hash) && response.key?(:data) && response[:data].key?('search')
+
+    aggs = response[:data]['search']['aggregations']
     return if aggs.blank?

     aggs = reorder_filters(aggs, active_filters) unless active_filters.blank?

@@ -78,7 +102,9 @@ def extract_filters(response)
   end

   def extract_results(response)
-    response&.data&.search&.to_h&.dig('records')
+    return unless response.is_a?(Hash) && response.key?(:data) && response[:data].key?('search')
+
+    response[:data]['search']['records']
   end

   def reorder_filters(aggs, active_filters)
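
For readers skimming the hunks above, here is a minimal standalone Ruby sketch (hypothetical query values, not taken from this repo) showing why sorting the query hash before digesting makes the cache key independent of parameter order, and how the Geo and USE branches then namespace that key:

    require 'digest'

    # Two hypothetical queries carrying the same parameters in a different order.
    query_a = { q: 'solar power', page: 2 }
    query_b = { page: 2, q: 'solar power' }

    # Mirrors the controller's key generation: sort by key, rebuild the hash, digest its string form.
    key_for = lambda do |query|
      sorted_query = query.sort_by { |k, _v| k.to_sym }.to_h
      Digest::MD5.hexdigest(sorted_query.to_s)
    end

    key_for.call(query_a) == key_for.call(query_b) # => true

    # The controller then stores results under "#{key}/geo" or "#{key}/use",
    # so the two query types never collide even if their digests matched.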

app/models/analyzer.rb (5 additions, 1 deletion)

@@ -15,7 +15,11 @@ def initialize(enhanced_query, response)
   private

   def hits(response)
-    response&.data&.search&.to_h&.dig('hits')
+    return 0 if response.nil?
+    return 0 unless response.is_a?(Hash) && response.key?(:data) && response[:data].key?('search')
+    return 0 unless response[:data]['search'].is_a?(Hash) && response[:data]['search'].key?('hits')
+
+    response[:data]['search']['hits']
   end

   def next_page(page, hits)
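
To make the analyzer change concrete, here is a small sketch (the new guard clauses copied in isolation, fed hypothetical cached payloads) of how hits behaves now that it receives a plain hash rather than a GraphQL::Client::Response:

    # Standalone copy of the guarded reader for illustration; in the app this is a private Analyzer method.
    def hits(response)
      return 0 if response.nil?
      return 0 unless response.is_a?(Hash) && response.key?(:data) && response[:data].key?('search')
      return 0 unless response[:data]['search'].is_a?(Hash) && response[:data]['search'].key?('hits')

      response[:data]['search']['hits']
    end

    # Hypothetical payloads in the shape the controller now caches.
    hits(nil)                                         # => 0
    hits({ data: {} })                                # => 0
    hits({ data: { 'search' => { 'hits' => 42 } } })  # => 42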
