Merge pull request #11868 from rtibbles/no_redis_cache_no_problem
Ensure that Kolibri can start regardless of whether the redis and redis_cache packages are installed.
rtibbles authored Feb 13, 2024
2 parents e7c4633 + 840b416 commit 2837e15
Showing 2 changed files with 42 additions and 13 deletions.
28 changes: 17 additions & 11 deletions kolibri/core/utils/cache.py
@@ -3,7 +3,6 @@
 from django.core.cache import caches
 from django.core.cache import InvalidCacheBackendError
 from django.utils.functional import SimpleLazyObject
-from redis_cache import RedisCache as BaseRedisCache


logger = logging.getLogger(__name__)
@@ -64,13 +63,20 @@ def save(self):
         self.changed = False


-class RedisCache(BaseRedisCache):
-    def set(self, *args, **kwargs):
-        """
-        Overwrite the set method so that it does not return a value, in line with the Django cache interface.
-        Returning a value causes particular issues for Django's caching middleware, which invokes set directly
-        in a lambda passed to the response.add_post_render_callback method and expects it to return None.
-        We use a similar pattern in our own caching decorator in kolibri/core/content/api.py and saw errors,
-        because if the lambda returns a value, it is interpreted as a replacement for the response object.
-        """
-        super(RedisCache, self).set(*args, **kwargs)
+try:
+    from redis_cache import RedisCache as BaseRedisCache
+
+    class RedisCache(BaseRedisCache):
+        def set(self, *args, **kwargs):
+            """
+            Overwrite the set method so that it does not return a value, in line with the Django cache interface.
+            Returning a value causes particular issues for Django's caching middleware, which invokes set directly
+            in a lambda passed to the response.add_post_render_callback method and expects it to return None.
+            We use a similar pattern in our own caching decorator in kolibri/core/content/api.py and saw errors,
+            because if the lambda returns a value, it is interpreted as a replacement for the response object.
+            """
+            super(RedisCache, self).set(*args, **kwargs)
+
+
+except (ImportError, InvalidCacheBackendError):
+    pass
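With this guard in place, kolibri/core/utils/cache.py always imports cleanly: if the redis_cache package is missing, the module simply never defines RedisCache, and importing that name fails with a plain ImportError at the point of use rather than crashing Kolibri at startup. A minimal sketch of the probe pattern this enables (the "memory" fallback here is illustrative, not part of this commit):

```python
# Sketch: detect whether the optional Redis-backed cache is usable.
# Assumes Kolibri is importable; the fallback value is illustrative.
try:
    from kolibri.core.utils.cache import RedisCache  # noqa: F401

    backend = "redis"
except ImportError:
    # redis_cache (or a usable Redis backend) is not installed here.
    backend = "memory"

print("Cache backend selected:", backend)
```

This is the same check that the new cache_option validator in kolibri/utils/options.py performs below.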
27 changes: 25 additions & 2 deletions kolibri/utils/options.py
@@ -19,6 +19,7 @@
 from six.moves.urllib.parse import urlparse
 from six.moves.urllib.parse import urlunparse
 from validate import is_boolean
+from validate import is_option
 from validate import Validator
 from validate import VdtTypeError
 from validate import VdtValueError
@@ -271,6 +272,28 @@ def multiprocess_bool(value):
     return False


+def cache_option(value):
+    """
+    Validate the cache options.
+    Do this by checking it's an allowed option, and also that redis cache
+    can be imported properly on this platform.
+    """
+    value = is_option(value, "memory", "redis")
+    try:
+        if value == "redis":
+            # Check that we can properly import our RedisCache
+            # implementation, to ensure that we can use it.
+            # Also ensure that the redis package is installed.
+            from kolibri.core.utils.cache import RedisCache  # noqa
+            import redis  # noqa
+        return value
+    except ImportError:
+        logger.error(
+            "Redis cache backend is not available, are Redis packages installed?"
+        )
+        raise VdtValueError(value)
+
+
 class LazyImportFunction(object):
     """
     A function wrapper that will import a module when called.
@@ -339,8 +362,7 @@ def lazy_import_callback_list(value):
 base_option_spec = {
     "Cache": {
         "CACHE_BACKEND": {
-            "type": "option",
-            "options": ("memory", "redis"),
+            "type": "cache_option",
             "default": "memory",
             "description": """
             Which backend to use for the main cache - if 'memory' is selected, then for most cache operations,
@@ -716,6 +738,7 @@ def _get_validator():
             "url_prefix": url_prefix,
             "bytes": validate_bytes,
             "multiprocess_bool": multiprocess_bool,
+            "cache_option": cache_option,
             "lazy_import_callback_list": lazy_import_callback_list,
         }
     )
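For context, configobj's Validator accepts a mapping of custom check names to functions, which is how the new "type": "cache_option" spec entry above gets routed to cache_option. A self-contained sketch of that registration pattern, using a simplified stand-in check (the real check in this commit also probes the Redis imports):

```python
from validate import Validator, VdtValueError, is_option


def cache_option(value):
    # Simplified stand-in for the check added in this commit:
    # accept only the allowed options, without the import probe.
    return is_option(value, "memory", "redis")


validator = Validator({"cache_option": cache_option})

print(validator.check("cache_option", "memory"))  # -> memory
try:
    validator.check("cache_option", "filesystem")
except VdtValueError as err:
    print("rejected:", err)
```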
