summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDaniyal <abbasi.daniyal98@gmail.com>2021-05-24 05:31:50 +0530
committerMariusz Felisiak <felisiak.mariusz@gmail.com>2021-09-14 15:50:08 +0200
commitec212c66167759a2a40b13d5efc47d883816d4da (patch)
tree740bed1fef217361ef973c732045de73bb6f64d1
parent676bd084f2509f4201561d5c77ed4ecbd157bfa0 (diff)
Fixed #33012 -- Added Redis cache backend.
Thanks Carlton Gibson, Chris Jerdonek, David Smith, Keryn Knight, Mariusz Felisiak, and Nick Pope for reviews and mentoring this Google Summer of Code 2021 project.
-rw-r--r--django/core/cache/backends/redis.py224
-rw-r--r--docs/internals/contributing/writing-code/unit-tests.txt7
-rw-r--r--docs/ref/settings.txt5
-rw-r--r--docs/releases/4.0.txt10
-rw-r--r--docs/spelling_wordlist1
-rw-r--r--docs/topics/cache.txt93
-rw-r--r--tests/cache/tests.py67
-rw-r--r--tests/requirements/py3.txt1
8 files changed, 398 insertions, 10 deletions
diff --git a/django/core/cache/backends/redis.py b/django/core/cache/backends/redis.py
new file mode 100644
index 0000000000..16556b1ded
--- /dev/null
+++ b/django/core/cache/backends/redis.py
@@ -0,0 +1,224 @@
+"""Redis cache backend."""
+
+import random
+import re
+
+from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
+from django.core.serializers.base import PickleSerializer
+from django.utils.functional import cached_property
+from django.utils.module_loading import import_string
+
+
class RedisSerializer(PickleSerializer):
    """
    Serializer for the Redis cache backend.

    Plain integers are stored as raw values so that Redis-native operations
    (INCR/DECR) keep working on them; every other object is pickled.
    """

    def dumps(self, obj):
        # Only skip pickling for plain ints. An int subclass such as bool
        # must still be pickled, otherwise True would round-trip as 1.
        if type(obj) is int:
            return obj
        return super().dumps(obj)

    def loads(self, data):
        # Raw integers fail int() never, pickled payloads always do — so try
        # the cheap int() conversion first and fall back to unpickling.
        try:
            return int(data)
        except ValueError:
            return super().loads(data)
+
+
class RedisCacheClient:
    """
    Thin wrapper around the redis-py client.

    One connection pool is kept per configured server URL. Write operations
    always go to the first server (the leader); when more than one server is
    configured, reads are distributed at random over the remaining servers
    (the replicas).
    """

    def __init__(
        self,
        servers,
        serializer=None,
        db=None,
        pool_class=None,
        parser_class=None,
    ):
        # Imported lazily so the backend module can be imported without
        # redis-py installed; the dependency is only needed once a client
        # is actually constructed.
        import redis

        self._lib = redis
        self._servers = servers
        self._pools = {}

        self._client = self._lib.Redis

        # pool_class, serializer, and parser_class may be given as dotted
        # import paths in OPTIONS; resolve strings to the actual objects.
        if isinstance(pool_class, str):
            pool_class = import_string(pool_class)
        self._pool_class = pool_class or self._lib.ConnectionPool

        if isinstance(serializer, str):
            serializer = import_string(serializer)
        if callable(serializer):
            serializer = serializer()
        self._serializer = serializer or RedisSerializer()

        if isinstance(parser_class, str):
            parser_class = import_string(parser_class)
        parser_class = parser_class or self._lib.connection.DefaultParser

        self._pool_options = {'parser_class': parser_class, 'db': db}

    def _get_connection_pool_index(self, write):
        # Write to the first server. Read from other servers if there are
        # more, otherwise read from the first server.
        if write or len(self._servers) == 1:
            return 0
        return random.randint(1, len(self._servers) - 1)

    def _get_connection_pool(self, write):
        index = self._get_connection_pool_index(write)
        if index not in self._pools:
            # Pools are created lazily, one per server URL.
            self._pools[index] = self._pool_class.from_url(
                self._servers[index], **self._pool_options,
            )
        return self._pools[index]

    def get_client(self, key=None, *, write=False):
        # key is used so that the method signature remains the same and a
        # custom cache client can be implemented which might require the key
        # to select the server, e.g. sharding.
        pool = self._get_connection_pool(write)
        return self._client(connection_pool=pool)

    def add(self, key, value, timeout):
        """Set key only if it doesn't exist; return whether it was set."""
        client = self.get_client(key, write=True)
        value = self._serializer.dumps(value)

        if timeout == 0:
            # A zero timeout means "expire immediately": set-then-delete so
            # the return value still reflects whether the key was absent.
            if ret := bool(client.set(key, value, nx=True)):
                client.delete(key)
            return ret
        else:
            return bool(client.set(key, value, ex=timeout, nx=True))

    def get(self, key, default):
        client = self.get_client(key)
        value = client.get(key)
        return default if value is None else self._serializer.loads(value)

    def set(self, key, value, timeout):
        client = self.get_client(key, write=True)
        value = self._serializer.dumps(value)
        if timeout == 0:
            # ex=0 is invalid in Redis; deleting matches the intended
            # "already expired" semantics.
            client.delete(key)
        else:
            client.set(key, value, ex=timeout)

    def touch(self, key, timeout):
        """Update the key's expiry; None makes the key persistent."""
        client = self.get_client(key, write=True)
        if timeout is None:
            return bool(client.persist(key))
        else:
            return bool(client.expire(key, timeout))

    def delete(self, key):
        client = self.get_client(key, write=True)
        return bool(client.delete(key))

    def get_many(self, keys):
        client = self.get_client(None)
        ret = client.mget(keys)
        # Missing keys come back as None from MGET and are omitted.
        return {
            k: self._serializer.loads(v) for k, v in zip(keys, ret) if v is not None
        }

    def has_key(self, key):
        client = self.get_client(key)
        return bool(client.exists(key))

    def incr(self, key, delta):
        # INCR is a write operation, so it must use the write (leader)
        # client — a read replica would reject or lose the increment.
        client = self.get_client(key, write=True)
        if not client.exists(key):
            raise ValueError("Key '%s' not found." % key)
        return client.incr(key, delta)

    def set_many(self, data, timeout):
        client = self.get_client(None, write=True)
        pipeline = client.pipeline()
        pipeline.mset({k: self._serializer.dumps(v) for k, v in data.items()})

        if timeout is not None:
            # Setting timeout for each key as redis does not support timeout
            # with mset().
            for key in data:
                pipeline.expire(key, timeout)
        pipeline.execute()

    def delete_many(self, keys):
        client = self.get_client(None, write=True)
        client.delete(*keys)

    def clear(self):
        # Flushes the entire selected logical database, not just Django keys.
        client = self.get_client(None, write=True)
        return bool(client.flushdb())
+
+
class RedisCache(BaseCache):
    """
    Redis cache backend.

    Validates/prefixes keys and translates timeouts, then delegates every
    operation to a lazily created RedisCacheClient.
    """

    def __init__(self, server, params):
        super().__init__(params)
        # LOCATION may be a single string (';' or ',' separated) or a list.
        self._servers = re.split('[;,]', server) if isinstance(server, str) else server
        self._class = RedisCacheClient
        self._options = params.get('OPTIONS', {})

    @cached_property
    def _cache(self):
        # Built on first use so OPTIONS errors surface at access time.
        return self._class(self._servers, **self._options)

    def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        if timeout is None:
            # None makes the key persistent.
            return None
        # Non-positive values cause the key to be deleted immediately.
        return max(0, int(timeout))

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_and_validate_key(key, version=version)
        return self._cache.add(key, value, self.get_backend_timeout(timeout))

    def get(self, key, default=None, version=None):
        key = self.make_and_validate_key(key, version=version)
        return self._cache.get(key, default)

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_and_validate_key(key, version=version)
        self._cache.set(key, value, self.get_backend_timeout(timeout))

    def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_and_validate_key(key, version=version)
        return self._cache.touch(key, self.get_backend_timeout(timeout))

    def delete(self, key, version=None):
        key = self.make_and_validate_key(key, version=version)
        return self._cache.delete(key)

    def get_many(self, keys, version=None):
        # Map validated keys back to the caller's original keys.
        key_map = {self.make_and_validate_key(key, version=version): key for key in keys}
        fetched = self._cache.get_many(key_map.keys())
        return {key_map[validated]: value for validated, value in fetched.items()}

    def has_key(self, key, version=None):
        key = self.make_and_validate_key(key, version=version)
        return self._cache.has_key(key)

    def incr(self, key, delta=1, version=None):
        key = self.make_and_validate_key(key, version=version)
        return self._cache.incr(key, delta)

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        safe_data = {
            self.make_and_validate_key(key, version=version): value
            for key, value in data.items()
        }
        self._cache.set_many(safe_data, self.get_backend_timeout(timeout))
        # Per the BaseCache contract, return the list of keys that failed
        # to be set (always empty here).
        return []

    def delete_many(self, keys, version=None):
        safe_keys = [
            self.make_and_validate_key(key, version=version) for key in keys
        ]
        self._cache.delete_many(safe_keys)

    def clear(self):
        return self._cache.clear()
diff --git a/docs/internals/contributing/writing-code/unit-tests.txt b/docs/internals/contributing/writing-code/unit-tests.txt
index 6a5bd5ab8f..9bba72c451 100644
--- a/docs/internals/contributing/writing-code/unit-tests.txt
+++ b/docs/internals/contributing/writing-code/unit-tests.txt
@@ -285,6 +285,7 @@ dependencies:
* PyYAML_
* pytz_ (required)
* pywatchman_
+* redis_
* setuptools_
* memcached_, plus a :ref:`supported Python binding <memcached>`
* gettext_ (:ref:`gettext_on_windows`)
@@ -308,8 +309,9 @@ encounter.
You can also install the database adapter(s) of your choice using
``oracle.txt``, ``mysql.txt``, or ``postgres.txt``.
-If you want to test the memcached cache backend, you'll also need to define
-a :setting:`CACHES` setting that points at your memcached instance.
+If you want to test the memcached or Redis cache backends, you'll also need to
+define a :setting:`CACHES` setting that points at your memcached or Redis
+instance respectively.
To run the GeoDjango tests, you will need to :doc:`set up a spatial database
and install the Geospatial libraries</ref/contrib/gis/install/index>`.
@@ -332,6 +334,7 @@ service.
.. _PyYAML: https://pyyaml.org/wiki/PyYAML
.. _pytz: https://pypi.org/project/pytz/
.. _pywatchman: https://pypi.org/project/pywatchman/
+.. _redis: https://pypi.org/project/redis/
.. _setuptools: https://pypi.org/project/setuptools/
.. _memcached: https://memcached.org/
.. _gettext: https://www.gnu.org/software/gettext/manual/gettext.html
diff --git a/docs/ref/settings.txt b/docs/ref/settings.txt
index 12d89142d1..0c5b3fe307 100644
--- a/docs/ref/settings.txt
+++ b/docs/ref/settings.txt
@@ -153,6 +153,7 @@ The cache backend to use. The built-in cache backends are:
* ``'django.core.cache.backends.locmem.LocMemCache'``
* ``'django.core.cache.backends.memcached.PyMemcacheCache'``
* ``'django.core.cache.backends.memcached.PyLibMCCache'``
+* ``'django.core.cache.backends.redis.RedisCache'``
You can use a cache backend that doesn't ship with Django by setting
:setting:`BACKEND <CACHES-BACKEND>` to a fully-qualified path of a cache
@@ -162,6 +163,10 @@ backend class (i.e. ``mypackage.backends.whatever.WhateverCache``).
The ``PyMemcacheCache`` backend was added.
+.. versionchanged:: 4.0
+
+ The ``RedisCache`` backend was added.
+
.. setting:: CACHES-KEY_FUNCTION
``KEY_FUNCTION``
diff --git a/docs/releases/4.0.txt b/docs/releases/4.0.txt
index 709363a08f..540500af47 100644
--- a/docs/releases/4.0.txt
+++ b/docs/releases/4.0.txt
@@ -65,6 +65,16 @@ The new :ref:`scrypt password hasher <scrypt-usage>` is more secure and
recommended over PBKDF2. However, it's not the default as it requires OpenSSL
1.1+ and more memory.
+Redis cache backend
+-------------------
+
+The new ``django.core.cache.backends.redis.RedisCache`` cache backend provides
+built-in support for caching with Redis. `redis-py`_ 3.0.0 or higher is
+required. For more details, see the :ref:`documentation on caching with Redis
+in Django <redis>`.
+
+.. _`redis-py`: https://pypi.org/project/redis/
+
Minor features
--------------
diff --git a/docs/spelling_wordlist b/docs/spelling_wordlist
index bd8785cf67..8d9b17a43d 100644
--- a/docs/spelling_wordlist
+++ b/docs/spelling_wordlist
@@ -423,6 +423,7 @@ recomputation
recursed
redeclare
redirections
+redis
redisplay
redisplayed
redisplaying
diff --git a/docs/topics/cache.txt b/docs/topics/cache.txt
index 0f25260672..3bc35fd51d 100644
--- a/docs/topics/cache.txt
+++ b/docs/topics/cache.txt
@@ -62,7 +62,6 @@ settings file. Here's an explanation of all available values for
Memcached
---------
-The fastest, most efficient type of cache supported natively by Django,
Memcached__ is an entirely memory-based cache server, originally developed
to handle high loads at LiveJournal.com and subsequently open-sourced by
Danga Interactive. It is used by sites such as Facebook and Wikipedia to
@@ -169,6 +168,71 @@ particularly temporary.
some problems and seems to be unmaintained. Use ``PyMemcacheCache`` or
``PyLibMCCache`` instead.
+.. _redis:
+
+Redis
+-----
+
+.. versionadded:: 4.0
+
+Redis__ is an in-memory database that can be used for caching. To begin you'll
+need a Redis server running either locally or on a remote machine.
+
+__ https://redis.io/
+
+After setting up the Redis server, you'll need to install Python bindings for
+Redis. `redis-py`_ is the binding supported natively by Django. Installing the
+additional `hiredis-py`_ package is also recommended.
+
+.. _`redis-py`: https://pypi.org/project/redis/
+.. _`hiredis-py`: https://pypi.org/project/hiredis/
+
+To use Redis as your cache backend with Django:
+
+* Set :setting:`BACKEND <CACHES-BACKEND>` to
+ ``django.core.cache.backends.redis.RedisCache``.
+
+* Set :setting:`LOCATION <CACHES-LOCATION>` to the URL pointing to your Redis
+ instance, using the appropriate scheme. See the ``redis-py`` docs for
+ `details on the available schemes
+ <https://redis-py.readthedocs.io/en/stable/#redis.ConnectionPool.from_url>`_.
+
+For example, if Redis is running on localhost (127.0.0.1) port 6379::
+
+ CACHES = {
+ 'default': {
+ 'BACKEND': 'django.core.cache.backends.redis.RedisCache',
+ 'LOCATION': 'redis://127.0.0.1:6379',
+ }
+ }
+
+Often Redis servers are protected with authentication. In order to supply a
+username and password, add them in the ``LOCATION`` along with the URL::
+
+ CACHES = {
+ 'default': {
+ 'BACKEND': 'django.core.cache.backends.redis.RedisCache',
+ 'LOCATION': 'redis://username:password@127.0.0.1:6379',
+ }
+ }
+
+If you have multiple Redis servers set up in replication mode, you can
+specify the servers either as a semicolon or comma delimited string, or as a
+list. While using multiple servers, write operations are performed on the first
+server (leader). Read operations are performed on the other servers (replicas)
+chosen at random::
+
+ CACHES = {
+ 'default': {
+ 'BACKEND': 'django.core.cache.backends.redis.RedisCache',
+ 'LOCATION': [
+ 'redis://127.0.0.1:6379', # leader
+ 'redis://127.0.0.1:6378', # read-replica 1
+ 'redis://127.0.0.1:6377', # read-replica 2
+ ],
+ }
+ }
+
.. _database-caching:
Database caching
@@ -422,9 +486,9 @@ behavior. These arguments are provided as additional keys in the
On some backends (``database`` in particular) this makes culling *much*
faster at the expense of more cache misses.
- Memcached backends pass the contents of :setting:`OPTIONS <CACHES-OPTIONS>`
- as keyword arguments to the client constructors, allowing for more advanced
- control of client behavior. For example usage, see below.
+ The Memcached and Redis backends pass the contents of :setting:`OPTIONS
+ <CACHES-OPTIONS>` as keyword arguments to the client constructors, allowing
+ for more advanced control of client behavior. For example usage, see below.
* :setting:`KEY_PREFIX <CACHES-KEY_PREFIX>`: A string that will be
automatically included (prepended by default) to all cache keys
@@ -496,6 +560,27 @@ flag on the connection's socket::
}
}
+Here's an example configuration for a ``redis`` based backend that selects
+database ``10`` (by default Redis ships with 16 logical databases), specifies a
+`parser class`_ (``redis.connection.HiredisParser`` will be used by default if
+the ``hiredis-py`` package is installed), and sets a custom `connection pool
+class`_ (``redis.ConnectionPool`` is used by default)::
+
+ CACHES = {
+ 'default': {
+ 'BACKEND': 'django.core.cache.backends.redis.RedisCache',
+ 'LOCATION': 'redis://127.0.0.1:6379',
+ 'OPTIONS': {
+ 'db': '10',
+ 'parser_class': 'redis.connection.PythonParser',
+ 'pool_class': 'redis.BlockingConnectionPool',
+ }
+ }
+ }
+
+.. _`parser class`: https://github.com/andymccurdy/redis-py#parsers
+.. _`connection pool class`: https://github.com/andymccurdy/redis-py#connection-pools
+
.. _the-per-site-cache:
The per-site cache
diff --git a/tests/cache/tests.py b/tests/cache/tests.py
index 0e2f5f7d1f..b880662858 100644
--- a/tests/cache/tests.py
+++ b/tests/cache/tests.py
@@ -22,6 +22,7 @@ from django.core.cache import (
caches,
)
from django.core.cache.backends.base import InvalidCacheBackendError
+from django.core.cache.backends.redis import RedisCacheClient
from django.core.cache.utils import make_template_fragment_key
from django.db import close_old_connections, connection, connections
from django.db.backends.utils import CursorWrapper
@@ -1373,10 +1374,9 @@ class LocMemCacheTests(BaseCacheTests, TestCase):
self.assertEqual(cache.get(9), 9)
-# memcached backend isn't guaranteed to be available.
-# To check the memcached backend, the test settings file will
-# need to contain at least one cache backend setting that points at
-# your memcache server.
+# memcached and redis backends aren't guaranteed to be available.
+# To check the backends, the test settings file will need to contain at least
+# one cache backend setting that points at your cache server.
configured_caches = {}
for _cache_params in settings.CACHES.values():
configured_caches[_cache_params['BACKEND']] = _cache_params
@@ -1387,6 +1387,11 @@ PyMemcacheCache_params = configured_caches.get('django.core.cache.backends.memca
# The memcached backends don't support cull-related options like `MAX_ENTRIES`.
memcached_excluded_caches = {'cull', 'zero_cull'}
+RedisCache_params = configured_caches.get('django.core.cache.backends.redis.RedisCache')
+
+# The redis backend does not support cull-related options like `MAX_ENTRIES`.
+redis_excluded_caches = {'cull', 'zero_cull'}
+
class BaseMemcachedTests(BaseCacheTests):
@@ -1727,6 +1732,60 @@ class FileBasedCacheTests(BaseCacheTests, TestCase):
self.assertIs(cache._is_expired(fh), True)
@unittest.skipUnless(RedisCache_params, "Redis backend not configured")
@override_settings(CACHES=caches_setting_for_tests(
    base=RedisCache_params,
    exclude=redis_excluded_caches,
))
class RedisCacheTests(BaseCacheTests, TestCase):
    # Tests specific to the Redis backend, on top of the shared suite in
    # BaseCacheTests. Requires a reachable Redis server (see module comment).

    def setUp(self):
        import redis
        super().setUp()
        self.lib = redis

    @property
    def incr_decr_type_error(self):
        # Redis reports non-numeric incr/decr as a server-side error.
        return self.lib.ResponseError

    def test_cache_client_class(self):
        self.assertIs(cache._class, RedisCacheClient)
        self.assertIsInstance(cache._cache, RedisCacheClient)

    def test_get_backend_timeout_method(self):
        # Positive timeouts pass through unchanged.
        self.assertEqual(cache.get_backend_timeout(10), 10)
        # Negative timeouts clamp to 0 (immediate expiry).
        self.assertEqual(cache.get_backend_timeout(-5), 0)
        # None means "persist forever".
        self.assertIsNone(cache.get_backend_timeout(None))

    def test_get_connection_pool_index(self):
        backend_client = cache._cache
        self.assertEqual(backend_client._get_connection_pool_index(write=True), 0)
        read_index = backend_client._get_connection_pool_index(write=False)
        if len(backend_client._servers) == 1:
            self.assertEqual(read_index, 0)
        else:
            # Reads go to a randomly chosen replica (index >= 1).
            self.assertGreater(read_index, 0)
            self.assertLess(read_index, len(backend_client._servers))

    def test_get_connection_pool(self):
        for write in (True, False):
            pool = cache._cache._get_connection_pool(write=write)
            self.assertIsInstance(pool, self.lib.ConnectionPool)

    def test_get_client(self):
        self.assertIsInstance(cache._cache.get_client(), self.lib.Redis)
+
+
class FileBasedCachePathLibTests(FileBasedCacheTests):
def mkdtemp(self):
tmp_dir = super().mkdtemp()
diff --git a/tests/requirements/py3.txt b/tests/requirements/py3.txt
index af06c4e5f6..893e47a914 100644
--- a/tests/requirements/py3.txt
+++ b/tests/requirements/py3.txt
@@ -15,6 +15,7 @@ python-memcached >= 1.59
pytz
pywatchman; sys.platform != 'win32'
PyYAML
+redis >= 3.0.0
selenium
sqlparse >= 0.2.2
tblib >= 1.5.0