From 1eb09614e94eaf0b607a1b610fc6ce96b3fda764 Mon Sep 17 00:00:00 2001
From: Kris Bandurski
Date: Mon, 15 Jul 2013 13:52:49 +0100
Subject: [PATCH] HOTFIX: Support lists of servers.

---
 newcache.py | 48 +++++++++++++++++++++++++++---------------------
 1 file changed, 27 insertions(+), 21 deletions(-)

diff --git a/newcache.py b/newcache.py
index 1a26e17..0a4694f 100644
--- a/newcache.py
+++ b/newcache.py
@@ -5,6 +5,7 @@
 from threading import local
 
 from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
+from django.utils import six
 from django.utils.hashcompat import sha_constructor
 from django.utils.encoding import smart_str
 from django.conf import settings
@@ -24,7 +25,7 @@
         import memcache
         NotFoundError = ValueError
     except ImportError:
-        raise InvalidCacheBackendError('Memcached cache backend requires ' + 
+        raise InvalidCacheBackendError('Memcached cache backend requires ' +
             'either the "pylibmc" or "memcache" library')
 
 # Flavor is used amongst multiple apps to differentiate the "flavor" of the
@@ -54,10 +55,15 @@ class CacheClass(BaseCache):
 
     def __init__(self, server, params):
         super(CacheClass, self).__init__(params)
-        self._servers = server.split(';')
+
+        if isinstance(server, six.string_types):
+            self._servers = server.split(';')
+        else:
+            self._servers = server
+
         self._use_binary = bool(params.get('binary'))
         self._local = local()
-    
+
     @property
     def _cache(self):
         """
@@ -66,17 +72,17 @@ def _cache(self):
         client = getattr(self._local, 'client', None)
         if client:
             return client
-        
+
         # Use binary mode if it's both supported and requested
         if using_pylibmc and self._use_binary:
             client = memcache.Client(self._servers, binary=True)
         else:
             client = memcache.Client(self._servers)
-        
+
         # If we're using pylibmc, set the behaviors according to settings
         if using_pylibmc:
             client.behaviors = CACHE_BEHAVIORS
-        
+
         self._local.client = client
         return client
 
@@ -87,7 +93,7 @@ def _pack_value(self, value, timeout):
         """
         herd_timeout = (timeout or self.default_timeout) + int(time.time())
         return (MARKER, value, herd_timeout)
-    
+
     def _unpack_value(self, value, default=None):
         """
         Unpacks a value and returns a tuple whose first element is the value,
@@ -137,9 +143,9 @@ def get(self, key, default=None):
         packed = self._cache.get(encoded_key)
         if packed is None:
             return default
-        
+
         val, refresh = self._unpack_value(packed)
-        
+
         # If the cache has expired according to the embedded timeout, then
         # shove it back into the cache for a while, but act as if it was a
         # cache miss.
@@ -147,7 +153,7 @@ def get(self, key, default=None):
             self._cache.set(encoded_key, val,
                 self._get_memcache_timeout(CACHE_HERD_TIMEOUT))
             return default
-        
+
         return val
 
     def set(self, key, value, timeout=None, herd=True):
@@ -168,36 +174,36 @@ def delete(self, key):
     def get_many(self, keys):
         # First, map all of the keys through our key function
        rvals = map(key_func, keys)
-        
+
         packed_resp = self._cache.get_multi(rvals)
-        
+
         resp = {}
         reinsert = {}
-        
+
         for key, packed in packed_resp.iteritems():
             # If it was a miss, treat it as a miss to our response & continue
             if packed is None:
                 resp[key] = packed
                 continue
-            
+
             val, refresh = self._unpack_value(packed)
             if refresh:
                 reinsert[key] = val
                 resp[key] = None
             else:
                 resp[key] = val
-        
+
         # If there are values to re-insert for a short period of time, then do
         # so now.
         if reinsert:
             self._cache.set_multi(reinsert,
                 self._get_memcache_timeout(CACHE_HERD_TIMEOUT))
-        
+
         # Build a reverse map of encoded keys to the original keys, so that
         # the returned dict's keys are what users expect (in that they match
         # what the user originally entered)
         reverse = dict(zip(rvals, keys))
-        
+
         return dict(((reverse[k], v) for k, v in resp.iteritems()))
 
     def close(self, **kwargs):
@@ -214,7 +220,7 @@ def decr(self, key, delta=1):
             return self._cache.decr(key_func(key), delta)
         except NotFoundError:
             raise ValueError("Key '%s' not found" % (key,))
-    
+
     def set_many(self, data, timeout=None, herd=True):
         if herd and timeout != 0:
             safe_data = dict(((key_func(k), self._pack_value(v, timeout))
@@ -223,9 +229,9 @@ def set_many(self, data, timeout=None, herd=True):
             safe_data = dict((
                 (key_func(k), v) for k, v in data.iteritems()))
         self._cache.set_multi(safe_data, self._get_memcache_timeout(timeout))
-    
+
     def delete_many(self, keys):
         self._cache.delete_multi(map(key_func, keys))
-    
+
     def clear(self):
-        self._cache.flush_all()
\ No newline at end of file
+        self._cache.flush_all()
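The functional change is confined to CacheClass.__init__: a semicolon-delimited string is still split as before, while a list (or tuple) of servers is now passed through to memcache.Client untouched instead of failing with AttributeError on .split(';'). A minimal configuration sketch of both forms it accepts, assuming Django's CACHES setting and that the backend is referenced by the dotted path newcache.CacheClass (the path and server addresses are placeholders; adjust to your deployment):

    # settings.py (sketch; backend path and addresses are illustrative only)

    # The only form supported before the patch: one string, servers joined by ';'.
    CACHES = {
        'default': {
            'BACKEND': 'newcache.CacheClass',
            'LOCATION': '10.0.0.1:11211;10.0.0.2:11211',
        },
    }

    # With the patch applied, a list of servers also works; it is handed to
    # memcache.Client() unchanged because it is not a string.
    CACHES = {
        'default': {
            'BACKEND': 'newcache.CacheClass',
            'LOCATION': ['10.0.0.1:11211', '10.0.0.2:11211'],
        },
    }

Passing a non-string sequence straight through works because both python-memcached's and pylibmc's Client() accept a list of server addresses, and the isinstance check mirrors how Django's own memcached backends normalise LOCATION. Note that the fix imports django.utils.six, so it assumes a Django version that bundles six.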