DEV: Implement a faster Discourse.cache

This is a bottom-up rewrite of the Discourse cache for better performance
and a smaller surface area.

ActiveSupport::Cache::Store accepts many options we do not use; this partial
implementation picks out only the bits we actually use and want to support.

Additionally, params are named, which avoids typos such as "expires_at" vs "expires_in".

This also moves a few spots in Discourse to use Discourse.cache over setex.
Performance of setex and Discourse.cache.write is similar.
Sam Saffron 2019-11-27 16:11:49 +11:00
parent 0fb497eb23
commit 88ecb650a9
9 changed files with 127 additions and 29 deletions
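To make the shape of the change concrete, the EmailController hunks below boil down to the following before/after, sketched here outside the diff for readability (it assumes the usual Discourse app context, where $redis and Discourse.cache are available):

# before: raw redis with positional args (easy to swap the order or typo the expiry)
$redis.setex(key, 1.hour, user.email)
email = $redis.get(key)

# after: Discourse.cache with a named expiry
# (a typo such as expires_at: now raises ArgumentError instead of being silently ignored)
Discourse.cache.write(key, user.email, expires_in: 1.hour)
email = Discourse.cache.read(key)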

@@ -112,7 +112,7 @@ class EmailController < ApplicationController
else
key = "unsub_#{SecureRandom.hex}"
$redis.setex key, 1.hour, user.email
Discourse.cache.write key, user.email, expires_in: 1.hour
url = path("/email/unsubscribed?key=#{key}")
if topic
@@ -125,7 +125,7 @@ class EmailController < ApplicationController
end
def unsubscribed
@email = $redis.get(params[:key])
@email = Discourse.cache.read(params[:key])
@topic_id = params[:topic_id]
user = User.find_by_email(@email)
raise Discourse::NotFound unless user

@@ -27,21 +27,21 @@ class DiscourseSingleSignOn < SingleSignOn
def register_nonce(return_path)
if nonce
$redis.setex(nonce_key, SingleSignOn.nonce_expiry_time, return_path)
Discourse.cache.write(nonce_key, return_path, expires_in: SingleSignOn.nonce_expiry_time)
end
end
def nonce_valid?
nonce && $redis.get(nonce_key).present?
nonce && Discourse.cache.read(nonce_key).present?
end
def return_path
$redis.get(nonce_key) || "/"
Discourse.cache.read(nonce_key) || "/"
end
def expire_nonce!
if nonce
$redis.del nonce_key
Discourse.cache.delete nonce_key
end
end

@@ -163,7 +163,7 @@ class Report
end
def self.cache(report, duration)
Discourse.cache.write(cache_key(report), report.as_json, force: true, expires_in: duration)
Discourse.cache.write(cache_key(report), report.as_json, expires_in: duration)
end
def self.find(type, opts = nil)

@@ -1,8 +1,20 @@
# frozen_string_literal: true
# Discourse specific cache, enforces 1 day expiry
# Discourse specific cache, enforces 1 day expiry by default
class Cache < ActiveSupport::Cache::Store
# This is a bottom-up implementation of ActiveSupport::Cache::Store
# that lets us cleanly skip the cache entry and versioning
# support we do not use; in turn this makes the cache as fast as simply
# using `$redis.setex`, with a more convenient API
#
# It only implements a subset of ActiveSupport::Cache::Store as we make no use
# of large parts of the interface.
#
# An additional advantage of this class is that all methods have named params.
# Rails tends to use an options hash for lots of stuff for legacy reasons,
# which makes it harder to reason about the API.
class Cache
# nothing is cached for longer than 1 day EVER
# there is no reason to have data older than this clogging redis
@@ -10,9 +22,14 @@ class Cache < ActiveSupport::Cache::Store
# pointless data
MAX_CACHE_AGE = 1.day unless defined? MAX_CACHE_AGE
def initialize(opts = {})
@namespace = opts[:namespace] || "_CACHE_"
super(opts)
# we don't need this feature, 1 day expiry is enough
# it makes lookups a tad cheaper
def self.supports_cache_versioning?
false
end
def initialize(namespace: "_CACHE")
@namespace = namespace
end
def redis
@@ -33,30 +50,82 @@ class Cache < ActiveSupport::Cache::Store
end
end
def normalize_key(key, opts = nil)
def normalize_key(key)
"#{@namespace}:#{key}"
end
def exist?(name)
key = normalize_key(name)
redis.exists(key)
end
# this removes a bunch of stuff we do not need like instrumentation and versioning
def read(name)
key = normalize_key(name)
read_entry(key)
end
def write(name, value, expires_in: nil)
write_entry(normalize_key(name), value, expires_in: expires_in)
end
def delete(name)
redis.del(normalize_key(name))
end
def fetch(name, expires_in: nil, force: nil, &blk)
if block_given?
key = normalize_key(name)
raw = nil
if !force
raw = redis.get(key)
end
if raw
begin
Marshal.load(raw)
rescue => e
log_first_exception(e)
end
else
val = blk.call
write_entry(key, val, expires_in: expires_in)
val
end
elsif force
raise ArgumentError, "Missing block: Calling `Cache#fetch` with `force: true` requires a block."
else
read(name)
end
end
protected
def read_entry(key, options)
if data = redis.get(key)
data = Marshal.load(data)
ActiveSupport::Cache::Entry.new data
def log_first_exception(e)
if !defined? @logged_a_warning
@logged_a_warning = true
Discourse.warn_exception(e, "Corrupt cache... skipping entry for key #{key}")
end
rescue
# corrupt cache, fail silently for now, remove rescue later
end
def write_entry(key, entry, options)
dumped = Marshal.dump(entry.value)
expiry = options[:expires_in] || MAX_CACHE_AGE
def read_entry(key)
if data = redis.get(key)
Marshal.load(data)
end
rescue => e
# corrupt cache, this can happen if Marshal version
# changes. Log it once so we can tell it is happening.
# should not happen under any normal circumstances, but we
# do not want to flood logs
log_first_exception(e)
end
def write_entry(key, value, expires_in: nil)
dumped = Marshal.dump(value)
expiry = expires_in || MAX_CACHE_AGE
redis.setex(key, expiry, dumped)
true
end
def delete_entry(key, options)
redis.del key
end
end
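Taken together, the rewritten class stores nothing more than Marshal.dump(value) under a namespaced key via setex, which is why it benchmarks so close to raw redis further down. A minimal usage sketch follows (it assumes a booted Discourse app; the key names and the compute_slow_report helper are illustrative only):

cache = Cache.new(namespace: "_CACHE")

# write/read round-trip: the value is Marshal-dumped and stored with setex
cache.write("answer", 42, expires_in: 5.minutes)
cache.read("answer")   # => 42
cache.exist?("answer") # => true

# fetch returns the cached value, or runs the block and caches its result
cache.fetch("slow_report", expires_in: 1.hour) { compute_slow_report }

# force: true always recomputes, and therefore requires a block
cache.fetch("slow_report", force: true) { compute_slow_report }

# roughly equivalent to the raw redis calls:
# $redis.setex("_CACHE:answer", 5.minutes, Marshal.dump(42))
# Marshal.load($redis.get("_CACHE:answer"))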

@@ -336,7 +336,7 @@ module DiscourseTagging
end
def self.staff_tag_names
tag_names = Discourse.cache.read(TAGS_STAFF_CACHE_KEY, tag_names)
tag_names = Discourse.cache.read(TAGS_STAFF_CACHE_KEY)
if !tag_names
tag_names = Tag.joins(tag_groups: :tag_group_permissions)

@@ -79,3 +79,17 @@ end
# redis get string marshal: 12789.2 i/s - same-ish: difference falls within error
# Rails read cache string: 10486.4 i/s - 1.25x slower
# Discourse read cache string: 10457.1 i/s - 1.26x slower
#
# After Cache re-write
#
# Comparison:
# redis setex string: 13390.9 i/s
# redis setex marshal string: 13202.0 i/s - same-ish: difference falls within error
# Discourse cache string: 12406.5 i/s - same-ish: difference falls within error
# Rails cache string: 12289.2 i/s - same-ish: difference falls within error
#
# Comparison:
# redis get string: 13589.6 i/s
# redis get string marshal: 13118.3 i/s - same-ish: difference falls within error
# Rails read cache string: 12482.2 i/s - same-ish: difference falls within error
# Discourse read cache string: 12296.8 i/s - same-ish: difference falls within error
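The comparison lines above follow benchmark-ips output. The bench script itself is not reproduced in this diff (only its appended results are), so the key names and payload below are assumptions, but a write-path comparison along these lines can be sketched as:

require "benchmark/ips"

Benchmark.ips do |x|
  x.report("redis setex string")     { $redis.setex("bench_redis", 86_400, "hello world") }
  x.report("Discourse cache string") { Discourse.cache.write("bench_cache", "hello world") }
  x.report("Rails cache string")     { Rails.cache.write("bench_rails", "hello world") }
  x.compare!
end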

@@ -9,6 +9,12 @@ describe Cache do
Cache.new
end
it "supports exist?" do
cache.write("testing", 1.1)
expect(cache.exist?("testing")).to eq(true)
expect(cache.exist?(SecureRandom.hex)).to eq(false)
end
it "supports float" do
cache.write("float", 1.1)
expect(cache.read("float")).to eq(1.1)
@@ -36,10 +42,14 @@ describe Cache do
end
it "can delete correctly" do
cache.delete("key")
cache.fetch("key", expires_in: 1.minute) do
"test"
end
expect(cache.fetch("key")).to eq("test")
cache.delete("key")
expect(cache.fetch("key")).to eq(nil)
end
@@ -69,6 +79,7 @@ describe Cache do
r = cache.fetch "key" do
"bob"
end
expect(r).to eq("bob")
end

@@ -44,6 +44,9 @@ describe "Redis Store" do
end
it "can be cleared without clearing our cache" do
cache.clear
store.clear
store.fetch "key" do
"key in store"
end
@@ -53,6 +56,7 @@ describe "Redis Store" do
end
store.clear
expect(store.read("key")).to eq(nil)
expect(cache.fetch("key")).to eq("key in cache")

@@ -151,7 +151,7 @@ RSpec.describe EmailController do
describe 'when topic is public' do
it 'should return the right response' do
key = SecureRandom.hex
$redis.set(key, user.email)
Discourse.cache.write(key, user.email)
get '/email/unsubscribed', params: { key: key, topic_id: topic.id }
expect(response.status).to eq(200)
expect(response.body).to include(topic.title)
@@ -161,7 +161,7 @@ RSpec.describe EmailController do
describe 'when topic is private' do
it 'should return the right response' do
key = SecureRandom.hex
$redis.set(key, user.email)
Discourse.cache.write(key, user.email)
get '/email/unsubscribed', params: { key: key, topic_id: private_topic.id }
expect(response.status).to eq(200)
expect(response.body).to_not include(private_topic.title)