Enable connection pooling by default for `MemCacheStore` and `RedisCacheStore`

fatkodima 2022-06-02 22:21:35 +03:00
parent 2895c6b9a2
commit 799b5c1df4
11 changed files with 55 additions and 35 deletions

View File

@@ -73,6 +73,7 @@ PATH
mini_mime (>= 1.1.0)
activesupport (7.1.0.alpha)
concurrent-ruby (~> 1.0, >= 1.0.2)
connection_pool (>= 2.2.5)
i18n (>= 1.6, < 2)
minitest (>= 5.1)
tzinfo (~> 2.0)

View File

@@ -1,3 +1,13 @@
* Enable connection pooling by default for `MemCacheStore` and `RedisCacheStore`.
If you want to disable connection pooling, set the `:pool` option to `false` when configuring the cache store:
```ruby
config.cache_store = :mem_cache_store, "cache.example.com", pool: false
```
*fatkodima*
* Add `force:` support to `ActiveSupport::Cache::Store#fetch_multi`.
*fatkodima*
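A minimal usage sketch of the new `force:` support on `fetch_multi` (the keys and the block body are illustrative, not from this commit):
```ruby
# With force: true every name is treated as a miss, so the block runs
# for each key and the results are written back to the cache.
Rails.cache.fetch_multi("alpha", "beta", force: true) do |key|
  "fresh value for #{key}"
end
# => { "alpha" => "fresh value for alpha", "beta" => "fresh value for beta" }
```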

View File

@@ -37,5 +37,6 @@ Gem::Specification.new do |s|
s.add_dependency "i18n", ">= 1.6", "< 2"
s.add_dependency "tzinfo", "~> 2.0"
s.add_dependency "concurrent-ruby", "~> 1.0", ">= 1.0.2"
s.add_dependency "connection_pool", ">= 2.2.5"
s.add_dependency "minitest", ">= 5.1"
end

View File

@@ -187,6 +187,10 @@ module ActiveSupport
private_constant :DEFAULT_POOL_OPTIONS
def retrieve_pool_options(options)
unless options.key?(:pool) || options.key?(:pool_size) || options.key?(:pool_timeout)
options[:pool] = true
end
if (pool_options = options.delete(:pool))
if Hash === pool_options
DEFAULT_POOL_OPTIONS.merge(pool_options)
@@ -213,13 +217,6 @@ module ActiveSupport
end
end
end
def ensure_connection_pool_added!
require "connection_pool"
rescue LoadError => e
$stderr.puts "You don't have connection_pool installed in your application. Please add it to your Gemfile and run bundle install"
raise e
end
end
# Creates a new cache. The options will be passed to any write method calls
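For context, a condensed, illustrative sketch of how the new option resolution behaves (not the exact Rails source; the deprecated `:pool_size`/`:pool_timeout` handling is omitted, and `DEFAULT_POOL_OPTIONS` is assumed to be `{ size: 5, timeout: 5 }`, as the tests later in this commit assert):

```ruby
# Illustrative re-implementation of the resolution shown above.
DEFAULT_POOL_OPTIONS = { size: 5, timeout: 5 }

def resolve_pool_options(options)
  # Pooling is now opt-out: turn it on unless the caller configured it explicitly.
  unless options.key?(:pool) || options.key?(:pool_size) || options.key?(:pool_timeout)
    options[:pool] = true
  end

  case (pool = options.delete(:pool))
  when Hash then DEFAULT_POOL_OPTIONS.merge(pool) # e.g. pool: { size: 32, timeout: 1 }
  when true then DEFAULT_POOL_OPTIONS.dup         # the new default
  else           {}                               # pool: false keeps a single client
  end
end

resolve_pool_options({})                             # => { size: 5, timeout: 5 }
resolve_pool_options(pool: { size: 32, timeout: 1 }) # => { size: 32, timeout: 1 }
resolve_pool_options(pool: false)                    # => {}
```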

View File

@@ -7,6 +7,7 @@ rescue LoadError => e
raise e
end
require "connection_pool"
require "delegate"
require "active_support/core_ext/enumerable"
require "active_support/core_ext/array/extract_options"
@@ -94,7 +95,6 @@ module ActiveSupport
if pool_options.empty?
Dalli::Client.new(addresses, options)
else
ensure_connection_pool_added!
ConnectionPool.new(pool_options) { Dalli::Client.new(addresses, options.merge(threadsafe: false)) }
end
end
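In isolation, the pooled branch builds something like the following sketch (assuming a local memcached and the default pool options introduced by this commit):

```ruby
require "connection_pool"
require "dalli"

# Five clients, each checked out for at most five seconds -- the defaults
# applied when no pool options are given.
pool = ConnectionPool.new(size: 5, timeout: 5) do
  # threadsafe: false is safe here because the pool guarantees a client
  # is used by only one thread at a time.
  Dalli::Client.new(["127.0.0.1:11211"], threadsafe: false)
end

# Each thread checks out its own client instead of serializing on one.
pool.with { |dalli| dalli.set("greeting", "hello") }
pool.with { |dalli| dalli.get("greeting") } # => "hello"
```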

View File

@@ -15,6 +15,7 @@ begin
rescue LoadError
end
require "connection_pool"
require "active_support/core_ext/numeric/time"
require "active_support/digest"
@@ -170,7 +171,6 @@ module ActiveSupport
pool_options = self.class.send(:retrieve_pool_options, redis_options)
if pool_options.any?
self.class.send(:ensure_connection_pool_added!)
::ConnectionPool.new(pool_options) { self.class.build_redis(**redis_options) }
else
self.class.build_redis(**redis_options)
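The effect from the caller's side, sketched under the assumption of a local Redis at the default URL:

```ruby
# Pooled by default: the object behind #redis is now a ConnectionPool,
# as the new test later in this commit asserts.
cache = ActiveSupport::Cache.lookup_store(:redis_cache_store, url: "redis://localhost:6379/0")
cache.redis # => #<ConnectionPool ...>

# Opting out restores the previous single-client behaviour.
plain = ActiveSupport::Cache.lookup_store(:redis_cache_store, url: "redis://localhost:6379/0", pool: false)
plain.redis # => #<Redis ...>
```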

View File

@@ -32,7 +32,7 @@ module ConnectionPoolBehavior
threads = []
emulating_latency do
cache = ActiveSupport::Cache.lookup_store(*store, store_options)
cache = ActiveSupport::Cache.lookup_store(*store, store_options.merge(pool: false))
assert_nothing_raised do
# Default connection pool size is 5, assuming 10 will make sure that

View File

@@ -29,7 +29,7 @@ class CacheStoreSettingTest < ActiveSupport::TestCase
def test_mem_cache_fragment_cache_store
assert_called_with(Dalli::Client, :new, [%w[localhost], { compress: false }]) do
store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost"
store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost", pool: false
assert_kind_of(ActiveSupport::Cache::MemCacheStore, store)
end
end
@@ -53,14 +53,14 @@ class CacheStoreSettingTest < ActiveSupport::TestCase
def test_mem_cache_fragment_cache_store_with_multiple_servers
assert_called_with(Dalli::Client, :new, [%w[localhost 192.168.1.1], { compress: false }]) do
store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost", "192.168.1.1"
store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost", "192.168.1.1", pool: false
assert_kind_of(ActiveSupport::Cache::MemCacheStore, store)
end
end
def test_mem_cache_fragment_cache_store_with_options
assert_called_with(Dalli::Client, :new, [%w[localhost 192.168.1.1], { timeout: 10, compress: false }]) do
store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost", "192.168.1.1", namespace: "foo", timeout: 10
store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost", "192.168.1.1", namespace: "foo", timeout: 10, pool: false
assert_kind_of(ActiveSupport::Cache::MemCacheStore, store)
assert_equal "foo", store.options[:namespace]
end

View File

@@ -44,8 +44,8 @@ class MemCacheStoreTest < ActiveSupport::TestCase
end
end
def lookup_store(options = {})
cache = ActiveSupport::Cache.lookup_store(*store, { namespace: @namespace }.merge(options))
def lookup_store(*addresses, **options)
cache = ActiveSupport::Cache.lookup_store(:mem_cache_store, *addresses, { namespace: @namespace, pool: false }.merge(options))
(@_stores ||= []) << cache
cache
end
@@ -220,28 +220,28 @@ class MemCacheStoreTest < ActiveSupport::TestCase
end
def test_unless_exist_expires_when_configured
cache = ActiveSupport::Cache.lookup_store(:mem_cache_store)
cache = lookup_store(namespace: nil)
assert_called_with client(cache), :add, [ "foo", Object, 1, Hash ] do
cache.write("foo", "bar", expires_in: 1, unless_exist: true)
end
end
def test_uses_provided_dalli_client_if_present
cache = ActiveSupport::Cache.lookup_store(:mem_cache_store, Dalli::Client.new("custom_host"))
cache = lookup_store(Dalli::Client.new("custom_host"))
assert_equal ["custom_host"], servers(cache)
end
def test_forwards_string_addresses_if_present
expected_addresses = ["first", "second"]
cache = ActiveSupport::Cache.lookup_store(:mem_cache_store, expected_addresses)
cache = lookup_store(expected_addresses)
assert_equal expected_addresses, servers(cache)
end
def test_falls_back_to_localhost_if_no_address_provided_and_memcache_servers_undefined
with_memcache_servers_environment_variable(nil) do
cache = ActiveSupport::Cache.lookup_store(:mem_cache_store)
cache = lookup_store
assert_equal ["127.0.0.1:11211"], servers(cache)
end
@@ -249,7 +249,7 @@ class MemCacheStoreTest < ActiveSupport::TestCase
def test_falls_back_to_localhost_if_address_provided_as_nil
with_memcache_servers_environment_variable(nil) do
cache = ActiveSupport::Cache.lookup_store(:mem_cache_store, nil)
cache = lookup_store(nil)
assert_equal ["127.0.0.1:11211"], servers(cache)
end
@@ -257,7 +257,7 @@ class MemCacheStoreTest < ActiveSupport::TestCase
def test_falls_back_to_localhost_if_no_address_provided_and_memcache_servers_defined
with_memcache_servers_environment_variable("custom_host") do
cache = ActiveSupport::Cache.lookup_store(:mem_cache_store)
cache = lookup_store
assert_equal ["custom_host"], servers(cache)
end
@@ -324,6 +324,14 @@ class MemCacheStoreTest < ActiveSupport::TestCase
end
end
def test_connection_pooling_by_default
cache = ActiveSupport::Cache.lookup_store(:mem_cache_store)
pool = cache.instance_variable_get(:@data)
assert_kind_of ::ConnectionPool, pool
assert_equal 5, pool.size
assert_equal 5, pool.instance_variable_get(:@timeout)
end
private
def random_string(length)
(0...length).map { (65 + rand(26)).chr }.join

View File

@@ -138,7 +138,7 @@ module ActiveSupport::Cache::RedisCacheStoreTests
private
def build(**kwargs)
ActiveSupport::Cache::RedisCacheStore.new(driver: DRIVER, **kwargs).tap(&:redis)
ActiveSupport::Cache::RedisCacheStore.new(driver: DRIVER, **kwargs.merge(pool: false)).tap(&:redis)
end
end
@@ -156,7 +156,7 @@ module ActiveSupport::Cache::RedisCacheStoreTests
end
def lookup_store(options = {})
ActiveSupport::Cache.lookup_store(:redis_cache_store, { timeout: 0.1, namespace: @namespace, driver: DRIVER }.merge(options))
ActiveSupport::Cache.lookup_store(:redis_cache_store, { timeout: 0.1, namespace: @namespace, driver: DRIVER, pool: false }.merge(options))
end
teardown do
@@ -300,6 +300,14 @@ module ActiveSupport::Cache::RedisCacheStoreTests
end
end
def test_connection_pooling_by_default
cache = ActiveSupport::Cache.lookup_store(:redis_cache_store)
pool = cache.redis
assert_kind_of ::ConnectionPool, pool
assert_equal 5, pool.size
assert_equal 5, pool.instance_variable_get(:@timeout)
end
private
def store
[:redis_cache_store]

View File

@@ -395,27 +395,22 @@ There are some common options that can be used by all cache implementations. The
#### Connection Pool Options
By default the `MemCacheStore` and `RedisCacheStore` use a single connection
per process. This means that if you're using Puma, or another threaded server,
you can have multiple threads waiting for the connection to become available.
To increase the number of available connections you can enable connection
pooling.
By default the `MemCacheStore` and `RedisCacheStore` are configured to use
connection pooling. This means that if you're using Puma, or another threaded server,
you can have multiple threads performing queries to the cache store at the same time.
First, add the `connection_pool` gem to your Gemfile:
If you want to disable connection pooling, set the `:pool` option to `false` when configuring the cache store:
```ruby
gem 'connection_pool'
config.cache_store = :mem_cache_store, "cache.example.com", pool: false
```
Next, set `:pool` option to `true` when configuring the cache store:
You can also override the default pool settings by providing individual options to the `:pool` option:
```ruby
config.cache_store = :mem_cache_store, "cache.example.com", pool: true
config.cache_store = :mem_cache_store, "cache.example.com", pool: { size: 32, timeout: 1 }
```
You can also override default pool settings by providing individual options
instead of `true` to this option.
* `:size` - This option sets the number of connections per process (defaults to 5).
* `:timeout` - This option sets the number of seconds to wait for a connection (defaults to 5). If no connection is available within the timeout, a `Timeout::Error` will be raised. Both options are combined in the example below.
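A hedged example combining both options for the Redis store (the URL is illustrative):

```ruby
config.cache_store = :redis_cache_store, {
  url: "redis://cache.example.com:6379/0",
  pool: { size: 10, timeout: 2 }
}
```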