memcache.py 7.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219
  1. """
  2. flask_caching.backends.memcache
  3. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  4. The memcache caching backend.
  5. :copyright: (c) 2018 by Peter Justin.
  6. :copyright: (c) 2010 by Thadeus Burgess.
  7. :license: BSD, see LICENSE for more details.
  8. """
  9. import pickle
  10. import re
  11. from cachelib import MemcachedCache as CachelibMemcachedCache
  12. from flask_caching.backends.base import BaseCache
# Key validator: accepts 1-250 characters that fall outside \x00-\x21 and
# \xff, i.e. rejects control characters, spaces and over-length keys that
# the memcached protocol would refuse.  Used by delete_many to skip
# invalid keys instead of erroring.
_test_memcached_key = re.compile(r"[^\x00-\x21\xff]{1,250}$").match
  14. class MemcachedCache(BaseCache, CachelibMemcachedCache):
  15. """A cache that uses memcached as backend.
  16. The first argument can either be an object that resembles the API of a
  17. :class:`memcache.Client` or a tuple/list of server addresses. In the
  18. event that a tuple/list is passed, Werkzeug tries to import the best
  19. available memcache library.
  20. This cache looks into the following packages/modules to find bindings for
  21. memcached:
  22. - ``pylibmc``
  23. - ``google.appengine.api.memcached``
  24. - ``memcached``
  25. - ``libmc``
  26. Implementation notes: This cache backend works around some limitations in
  27. memcached to simplify the interface. For example unicode keys are encoded
  28. to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return
  29. the keys in the same format as passed. Furthermore all get methods
  30. silently ignore key errors to not cause problems when untrusted user data
  31. is passed to the get methods which is often the case in web applications.
  32. :param servers: a list or tuple of server addresses or alternatively
  33. a :class:`memcache.Client` or a compatible client.
  34. :param default_timeout: the default timeout that is used if no timeout is
  35. specified on :meth:`~BaseCache.set`. A timeout of
  36. 0 indicates that the cache never expires.
  37. :param key_prefix: a prefix that is added before all keys. This makes it
  38. possible to use the same memcached server for different
  39. applications. Keep in mind that
  40. :meth:`~BaseCache.clear` will also clear keys with a
  41. different prefix.
  42. """
  43. def __init__(self, servers=None, default_timeout=300, key_prefix=None):
  44. BaseCache.__init__(self, default_timeout=default_timeout)
  45. CachelibMemcachedCache.__init__(
  46. self,
  47. servers=servers,
  48. default_timeout=default_timeout,
  49. key_prefix=key_prefix,
  50. )
  51. @classmethod
  52. def factory(cls, app, config, args, kwargs):
  53. args.append(config["CACHE_MEMCACHED_SERVERS"])
  54. kwargs.update(dict(key_prefix=config["CACHE_KEY_PREFIX"]))
  55. return cls(*args, **kwargs)
  56. def delete_many(self, *keys):
  57. new_keys = []
  58. for key in keys:
  59. key = self._normalize_key(key)
  60. if _test_memcached_key(key):
  61. new_keys.append(key)
  62. return self._client.delete_multi(new_keys)
  63. def inc(self, key, delta=1):
  64. key = self._normalize_key(key)
  65. return self._client.incr(key, delta)
  66. def dec(self, key, delta=1):
  67. key = self._normalize_key(key)
  68. return self._client.decr(key, delta)
  69. class SASLMemcachedCache(MemcachedCache):
  70. def __init__(
  71. self,
  72. servers=None,
  73. default_timeout=300,
  74. key_prefix=None,
  75. username=None,
  76. password=None,
  77. **kwargs,
  78. ):
  79. super().__init__(default_timeout=default_timeout)
  80. if servers is None:
  81. servers = ["127.0.0.1:11211"]
  82. import pylibmc
  83. self._client = pylibmc.Client(
  84. servers, username=username, password=password, binary=True, **kwargs
  85. )
  86. self.key_prefix = key_prefix
  87. @classmethod
  88. def factory(cls, app, config, args, kwargs):
  89. args.append(config["CACHE_MEMCACHED_SERVERS"])
  90. kwargs.update(
  91. dict(
  92. username=config["CACHE_MEMCACHED_USERNAME"],
  93. password=config["CACHE_MEMCACHED_PASSWORD"],
  94. key_prefix=config["CACHE_KEY_PREFIX"],
  95. )
  96. )
  97. return cls(*args, **kwargs)
  98. class SpreadSASLMemcachedCache(SASLMemcachedCache):
  99. """Simple Subclass of SASLMemcached client that will spread the value
  100. across multiple keys if they are bigger than a given threshold.
  101. Spreading requires using pickle to store the value, which can significantly
  102. impact the performance.
  103. """
  104. def __init__(self, *args, **kwargs):
  105. """
  106. Kwargs:
  107. chunksize (int): max length of a pickled object that can fit in
  108. memcached (memcache has an upper limit of 1MB for values,
  109. default: 1048448)
  110. """
  111. self.chunksize = kwargs.get("chunksize", 1048448)
  112. self.maxchunk = kwargs.get("maxchunk", 32)
  113. super().__init__(*args, **kwargs)
  114. @classmethod
  115. def factory(cls, app, config, args, kwargs):
  116. args.append(config["CACHE_MEMCACHED_SERVERS"])
  117. kwargs.update(
  118. dict(
  119. username=config.get("CACHE_MEMCACHED_USERNAME"),
  120. password=config.get("CACHE_MEMCACHED_PASSWORD"),
  121. key_prefix=config.get("CACHE_KEY_PREFIX"),
  122. )
  123. )
  124. return cls(*args, **kwargs)
  125. def delete(self, key):
  126. for skey in self._genkeys(key):
  127. super().delete(skey)
  128. def set(self, key, value, timeout=None, chunk=True):
  129. """Set a value in cache, potentially spreading it across multiple key.
  130. :param key: The cache key.
  131. :param value: The value to cache.
  132. :param timeout: The timeout after which the cache will be invalidated.
  133. :param chunk: If set to `False`, then spreading across multiple keys
  134. is disabled. This can be faster, but it will fail if
  135. the value is bigger than the chunks. It requires you
  136. to get back the object by specifying that it is not
  137. spread.
  138. """
  139. if chunk:
  140. return self._set(key, value, timeout=timeout)
  141. else:
  142. return super().set(key, value, timeout=timeout)
  143. def _set(self, key, value, timeout=None):
  144. # pickling/unpickling add an overhead,
  145. # I didn't found a good way to avoid pickling/unpickling if
  146. # key is smaller than chunksize, because in case or <werkzeug.requests>
  147. # getting the length consume the data iterator.
  148. serialized = pickle.dumps(value, 2)
  149. values = {}
  150. len_ser = len(serialized)
  151. chks = range(0, len_ser, self.chunksize)
  152. if len(chks) > self.maxchunk:
  153. raise ValueError("Cannot store value in less than %s keys" % self.maxchunk)
  154. for i in chks:
  155. values[f"{key}.{i // self.chunksize}"] = serialized[i : i + self.chunksize]
  156. super().set_many(values, timeout)
  157. def get(self, key, chunk=True):
  158. """Get a cached value.
  159. :param chunk: If set to ``False``, it will return a cached value
  160. that is spread across multiple keys.
  161. """
  162. if chunk:
  163. return self._get(key)
  164. else:
  165. return super().get(key)
  166. def _genkeys(self, key):
  167. return [f"{key}.{i}" for i in range(self.maxchunk)]
  168. def _get(self, key):
  169. to_get = [f"{key}.{i}" for i in range(self.maxchunk)]
  170. result = super().get_many(*to_get)
  171. serialized = b"".join(v for v in result if v is not None)
  172. if not serialized:
  173. return None
  174. return pickle.loads(serialized)