# frozen_string_literal: true

require "message_bus/distributed_cache"
# A process-local cache kept in sync across app processes via MessageBus.
# Entries are namespaced with the running app version so stale values from
# previous deploys are not served.
class DistributedCache < MessageBus::DistributedCache
  def initialize(key, manager: nil, namespace: true)
    super(key, manager: manager, namespace: namespace, app_version: Discourse.git_version)
  end

  # Store `v` under `k` without blocking the caller on MessageBus publication
  # (which writes to Redis); the actual set happens in a deferred task.
  def defer_set(k, v)
    Scheduler::Defer.later("#{@key}_set") { self[k] = v }
  end

  # Return the cached value for `k`, or compute it with the block and
  # schedule a deferred write when it is absent.
  def defer_get_set(k, &block)
    return self[k] if hash.key?(k)

    block.call.tap { |computed| defer_set(k, computed) }
  end

  # Bulk variant of defer_get_set. `key_blk` maps each element of `ks` to its
  # cache key; `blk` receives the (frozen) list of uncached elements and must
  # return their values in the same order. Returns a hash of `ks` => value,
  # preserving the order of `ks`.
  def defer_get_set_bulk(ks, key_blk, &blk)
    cached, uncached = ks.partition { |k| hash.key?(key_blk.call(k)) }
    cached_values = cached.each_with_object({}) { |k, acc| acc[k] = self[key_blk.call(k)] }

    if uncached.present?
      fresh_values = uncached.zip(blk.call(uncached.freeze)).to_h

      # Writing back is deferred so the hot path never waits on Redis.
      Scheduler::Defer.later("#{@key}_bulk_set") do
        fresh_values.each { |k, v| self[key_blk.call(k)] = v }
      end

      combined = fresh_values.merge(cached_values)
      ks.zip(combined.values_at(*ks)).to_h
    else
      cached_values
    end
  end

  # Clear the cache. By default the clear is delayed until the current DB
  # transaction commits, so a rolled-back transaction cannot leave other
  # processes with values the DB never persisted.
  def clear(after_commit: true)
    if !after_commit || GlobalSetting.skip_db?
      super()
    else
      DB.after_commit { super() }
    end
  end
end
|