HOTFIX: Support lists of servers. #12

Open · wants to merge 1 commit into base: master
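This change lets the backend's server argument be either the legacy semicolon-separated string or an already-built Python list of addresses. A rough settings sketch of what that enables (illustrative only, not part of this PR: the addresses are placeholders, and it assumes a Django version whose CACHES machinery passes LOCATION straight through as the backend's server argument):

```python
# Sketch, assuming LOCATION is handed to CacheClass.__init__ as `server`.
CACHES = {
    'default': {
        'BACKEND': 'newcache.CacheClass',
        # Legacy form, still supported: one semicolon-separated string.
        'LOCATION': '10.0.0.1:11211;10.0.0.2:11211',
    },
    'alternate': {
        'BACKEND': 'newcache.CacheClass',
        # Form enabled by this patch: a plain list of servers.
        'LOCATION': ['10.0.0.1:11211', '10.0.0.2:11211'],
    },
}
```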
newcache.py (48 changes: 27 additions & 21 deletions)
@@ -5,6 +5,7 @@
 from threading import local
 
 from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
+from django.utils import six
 from django.utils.hashcompat import sha_constructor
 from django.utils.encoding import smart_str
 from django.conf import settings
@@ -24,7 +25,7 @@
         import memcache
         NotFoundError = ValueError
     except ImportError:
-        raise InvalidCacheBackendError('Memcached cache backend requires ' +
+        raise InvalidCacheBackendError('Memcached cache backend requires ' +
             'either the "pylibmc" or "memcache" library')
 
 # Flavor is used amongst multiple apps to differentiate the "flavor" of the
@@ -54,10 +55,15 @@ class CacheClass(BaseCache):
 
     def __init__(self, server, params):
         super(CacheClass, self).__init__(params)
-        self._servers = server.split(';')
+
+        if isinstance(server, six.string_types):
+            self._servers = server.split(';')
+        else:
+            self._servers = server
+
         self._use_binary = bool(params.get('binary'))
         self._local = local()
 
     @property
     def _cache(self):
         """
@@ -66,17 +72,17 @@ def _cache(self):
         client = getattr(self._local, 'client', None)
         if client:
             return client
 
         # Use binary mode if it's both supported and requested
         if using_pylibmc and self._use_binary:
             client = memcache.Client(self._servers, binary=True)
         else:
             client = memcache.Client(self._servers)
 
         # If we're using pylibmc, set the behaviors according to settings
         if using_pylibmc:
             client.behaviors = CACHE_BEHAVIORS
 
         self._local.client = client
         return client
 
@@ -87,7 +93,7 @@ def _pack_value(self, value, timeout):
         """
         herd_timeout = (timeout or self.default_timeout) + int(time.time())
         return (MARKER, value, herd_timeout)
 
     def _unpack_value(self, value, default=None):
         """
         Unpacks a value and returns a tuple whose first element is the value,
@@ -137,17 +143,17 @@ def get(self, key, default=None):
         packed = self._cache.get(encoded_key)
         if packed is None:
             return default
 
         val, refresh = self._unpack_value(packed)
 
         # If the cache has expired according to the embedded timeout, then
         # shove it back into the cache for a while, but act as if it was a
         # cache miss.
         if refresh:
             self._cache.set(encoded_key, val,
                 self._get_memcache_timeout(CACHE_HERD_TIMEOUT))
             return default
 
         return val
 
     def set(self, key, value, timeout=None, herd=True):
@@ -168,36 +174,36 @@ def delete(self, key):
     def get_many(self, keys):
         # First, map all of the keys through our key function
         rvals = map(key_func, keys)
 
         packed_resp = self._cache.get_multi(rvals)
 
         resp = {}
         reinsert = {}
 
         for key, packed in packed_resp.iteritems():
             # If it was a miss, treat it as a miss to our response & continue
             if packed is None:
                 resp[key] = packed
                 continue
 
             val, refresh = self._unpack_value(packed)
             if refresh:
                 reinsert[key] = val
                 resp[key] = None
             else:
                 resp[key] = val
 
         # If there are values to re-insert for a short period of time, then do
         # so now.
         if reinsert:
             self._cache.set_multi(reinsert,
                 self._get_memcache_timeout(CACHE_HERD_TIMEOUT))
 
         # Build a reverse map of encoded keys to the original keys, so that
         # the returned dict's keys are what users expect (in that they match
         # what the user originally entered)
         reverse = dict(zip(rvals, keys))
 
         return dict(((reverse[k], v) for k, v in resp.iteritems()))
 
     def close(self, **kwargs):
@@ -214,7 +220,7 @@ def decr(self, key, delta=1):
             return self._cache.decr(key_func(key), delta)
         except NotFoundError:
             raise ValueError("Key '%s' not found" % (key,))
 
     def set_many(self, data, timeout=None, herd=True):
         if herd and timeout != 0:
             safe_data = dict(((key_func(k), self._pack_value(v, timeout))
@@ -223,9 +229,9 @@ def set_many(self, data, timeout=None, herd=True):
             safe_data = dict((
                 (key_func(k), v) for k, v in data.iteritems()))
         self._cache.set_multi(safe_data, self._get_memcache_timeout(timeout))
 
     def delete_many(self, keys):
         self._cache.delete_multi(map(key_func, keys))
 
     def clear(self):
-        self._cache.flush_all()
+        self._cache.flush_all()
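For reference, a quick hypothetical check of the normalization added in __init__ (assumes pylibmc or python-memcached is importable, an older Django that still provides the modules newcache imports, including django.utils.six; the addresses are placeholders and this snippet is not part of the PR):

```python
# Sketch: both accepted forms should yield the same normalized server list.
from newcache import CacheClass

from_string = CacheClass('10.0.0.1:11211;10.0.0.2:11211', {})
from_list = CacheClass(['10.0.0.1:11211', '10.0.0.2:11211'], {})

assert from_string._servers == from_list._servers == ['10.0.0.1:11211', '10.0.0.2:11211']
```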