# -*- coding: utf-8 -*-
"""Redis result store backend."""
from __future__ import absolute_import, unicode_literals

from functools import partial

from kombu.utils.functional import retry_over_time
from kombu.utils.objects import cached_property
from kombu.utils.url import _parse_url

from celery import states
from celery._state import task_join_will_block
from celery.canvas import maybe_signature
from celery.exceptions import ChordError, ImproperlyConfigured
from celery.five import string_t
from celery.utils import deprecated
from celery.utils.functional import dictfilter
from celery.utils.log import get_logger
from celery.utils.time import humanize_seconds

from . import async
from . import base

try:
    import redis
    from kombu.transport.redis import get_redis_error_classes
except ImportError:                 # pragma: no cover
    redis = None                    # noqa
    get_redis_error_classes = None  # noqa

__all__ = ['RedisBackend']

E_REDIS_MISSING = """
You need to install the redis library in order to use \
the Redis result store backend.
"""

E_LOST = 'Connection to Redis lost: Retry (%s/%s) %s.'

logger = get_logger(__name__)
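
# A minimal usage sketch (illustrative, not part of this module): the
# backend is normally selected through the app's ``backend`` argument or
# the ``result_backend`` setting, which routes here via the URL scheme:
#
#     from celery import Celery
#     app = Celery('proj', backend='redis://localhost:6379/0')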


class ResultConsumer(async.BaseResultConsumer):
    _pubsub = None

    def __init__(self, *args, **kwargs):
        super(ResultConsumer, self).__init__(*args, **kwargs)
        self._get_key_for_task = self.backend.get_key_for_task
        self._decode_result = self.backend.decode_result
        self.subscribed_to = set()

    def start(self, initial_task_id, **kwargs):
        self._pubsub = self.backend.client.pubsub(
            ignore_subscribe_messages=True,
        )
        self._consume_from(initial_task_id)

    def on_wait_for_pending(self, result, **kwargs):
        for meta in result._iter_meta():
            if meta is not None:
                self.on_state_change(meta, None)

    def stop(self):
        if self._pubsub is not None:
            self._pubsub.close()

    def drain_events(self, timeout=None):
        m = self._pubsub.get_message(timeout=timeout)
        if m and m['type'] == 'message':
            self.on_state_change(self._decode_result(m['data']), m)

    def consume_from(self, task_id):
        if self._pubsub is None:
            return self.start(task_id)
        self._consume_from(task_id)

    def _consume_from(self, task_id):
        key = self._get_key_for_task(task_id)
        if key not in self.subscribed_to:
            self.subscribed_to.add(key)
            self._pubsub.subscribe(key)

    def cancel_for(self, task_id):
        if self._pubsub:
            key = self._get_key_for_task(task_id)
            self.subscribed_to.discard(key)
            self._pubsub.unsubscribe(key)
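

# How results arrive (sketch): ``RedisBackend._set`` below both stores a
# task's result and PUBLISHes it on the same key, so a ResultConsumer that
# has SUBSCRIBEd to ``get_key_for_task(task_id)`` is handed the encoded
# result as a pub/sub message instead of having to poll the key.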


class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
    """Redis task result store."""

    ResultConsumer = ResultConsumer

    #: :pypi:`redis` client module.
    redis = redis

    #: Maximum number of connections in the pool.
    max_connections = None

    supports_autoexpire = True
    supports_native_join = True

    def __init__(self, host=None, port=None, db=None, password=None,
                 max_connections=None, url=None,
                 connection_pool=None, **kwargs):
        super(RedisBackend, self).__init__(expires_type=int, **kwargs)
        _get = self.app.conf.get
        if self.redis is None:
            raise ImproperlyConfigured(E_REDIS_MISSING.strip())

        if host and '://' in host:
            url, host = host, None

        self.max_connections = (
            max_connections or
            _get('redis_max_connections') or
            self.max_connections)
        self._ConnectionPool = connection_pool

        socket_timeout = _get('redis_socket_timeout')
        socket_connect_timeout = _get('redis_socket_connect_timeout')

        self.connparams = {
            'host': _get('redis_host') or 'localhost',
            'port': _get('redis_port') or 6379,
            'db': _get('redis_db') or 0,
            'password': _get('redis_password'),
            'max_connections': self.max_connections,
            'socket_timeout': socket_timeout and float(socket_timeout),
            'socket_connect_timeout':
                socket_connect_timeout and float(socket_connect_timeout),
        }
        if url:
            self.connparams = self._params_from_url(url, self.connparams)
        self.url = url

        self.connection_errors, self.channel_errors = (
            get_redis_error_classes() if get_redis_error_classes
            else ((), ()))
        self.result_consumer = self.ResultConsumer(
            self, self.app, self.accept,
            self._pending_results, self._pending_messages,
        )

    def _params_from_url(self, url, defaults):
        scheme, host, port, _, password, path, query = _parse_url(url)
        connparams = dict(
            defaults, **dictfilter({
                'host': host, 'port': port, 'password': password,
                'db': query.pop('virtual_host', None)})
        )

        if scheme == 'socket':
            # use 'path' as path to the socket… in this case
            # the database number should be given in 'query'
            connparams.update({
                'connection_class': self.redis.UnixDomainSocketConnection,
                'path': '/' + path,
            })
            # host+port are invalid options when using this connection type.
            connparams.pop('host', None)
            connparams.pop('port', None)
            connparams.pop('socket_connect_timeout')
        else:
            connparams['db'] = path

        # db may be string and start with / like in kombu.
        db = connparams.get('db') or 0
        db = db.strip('/') if isinstance(db, string_t) else db
        connparams['db'] = int(db)

        # Query parameters override other parameters
        connparams.update(query)
        return connparams
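
    # Accepted URL shapes (illustrative examples; the ``socket`` form takes
    # the ``scheme == 'socket'`` branch above and reads the database number
    # from the ``virtual_host`` query parameter):
    #
    #     redis://:password@localhost:6379/0
    #     socket:///tmp/redis.sock?virtual_host=1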

    def on_task_call(self, producer, task_id):
        if not task_join_will_block():
            self.result_consumer.consume_from(task_id)

    def get(self, key):
        return self.client.get(key)

    def mget(self, keys):
        return self.client.mget(keys)

    def ensure(self, fun, args, **policy):
        retry_policy = dict(self.retry_policy, **policy)
        max_retries = retry_policy.get('max_retries')
        return retry_over_time(
            fun, self.connection_errors, args, {},
            partial(self.on_connection_error, max_retries),
            **retry_policy)

    def on_connection_error(self, max_retries, exc, intervals, retries):
        tts = next(intervals)
        logger.error(
            E_LOST.strip(),
            retries, max_retries or 'Inf', humanize_seconds(tts, 'in '))
        return tts
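
    # ``ensure`` retries an operation with kombu's ``retry_over_time``; the
    # errback above returns the number of seconds to sleep before the next
    # attempt.  A sketch of tightening the policy for a single call (the
    # keyword names follow ``retry_over_time``):
    #
    #     backend.set(key, value, max_retries=3, interval_start=0.2)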

    def set(self, key, value, **retry_policy):
        return self.ensure(self._set, (key, value), **retry_policy)

    def _set(self, key, value):
        with self.client.pipeline() as pipe:
            if self.expires:
                pipe.setex(key, self.expires, value)
            else:
                pipe.set(key, value)
            pipe.publish(key, value)
            pipe.execute()
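
    # ``_set`` pipelines SETEX (or SET) together with PUBLISH, so storing a
    # result and notifying any subscribed ResultConsumer happen in a single
    # round-trip to the server.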

    def delete(self, key):
        self.client.delete(key)

    def incr(self, key):
        return self.client.incr(key)

    def expire(self, key, value):
        return self.client.expire(key, value)

    def add_to_chord(self, group_id, result):
        self.client.incr(self.get_key_for_group(group_id, '.t'), 1)

    def _unpack_chord_result(self, tup, decode,
                             EXCEPTION_STATES=states.EXCEPTION_STATES,
                             PROPAGATE_STATES=states.PROPAGATE_STATES):
        _, tid, state, retval = decode(tup)
        if state in EXCEPTION_STATES:
            retval = self.exception_to_python(retval)
        if state in PROPAGATE_STATES:
            raise ChordError('Dependency {0} raised {1!r}'.format(tid, retval))
        return retval

    def apply_chord(self, header, partial_args, group_id, body,
                    result=None, options={}, **kwargs):
        # Overrides this to avoid calling GroupResult.save
        # pylint: disable=method-hidden
        # Note that KeyValueStoreBackend.__init__ sets self.apply_chord
        # if the implements_incr attr is set.  Redis backend doesn't set
        # this flag.
        options['task_id'] = group_id
        return header(*partial_args, **options or {})
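
    # Chord bookkeeping (sketch of the key layout used below): for a group
    # ``gid``, the ``.j`` key holds a Redis list of encoded part results
    # and the ``.t`` key counts tasks added late via ``add_to_chord``.
    # Every finished part RPUSHes onto ``.j``; once LLEN(.j) equals
    # chord_size plus the ``.t`` counter, the callback is applied with the
    # unpacked results and both keys are deleted.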

    def on_chord_part_return(self, request, state, result,
                             propagate=None, **kwargs):
        app = self.app
        tid, gid = request.id, request.group
        if not gid or not tid:
            return
        client = self.client
        jkey = self.get_key_for_group(gid, '.j')
        tkey = self.get_key_for_group(gid, '.t')
        result = self.encode_result(result, state)
        with client.pipeline() as pipe:
            _, readycount, totaldiff, _, _ = pipe \
                .rpush(jkey, self.encode([1, tid, state, result])) \
                .llen(jkey) \
                .get(tkey) \
                .expire(jkey, self.expires) \
                .expire(tkey, self.expires) \
                .execute()
        totaldiff = int(totaldiff or 0)

        try:
            callback = maybe_signature(request.chord, app=app)
            total = callback['chord_size'] + totaldiff
            if readycount == total:
                decode, unpack = self.decode, self._unpack_chord_result
                with client.pipeline() as pipe:
                    resl, _, _ = pipe \
                        .lrange(jkey, 0, total) \
                        .delete(jkey) \
                        .delete(tkey) \
                        .execute()
                try:
                    callback.delay([unpack(tup, decode) for tup in resl])
                except Exception as exc:  # pylint: disable=broad-except
                    logger.exception(
                        'Chord callback for %r raised: %r', request.group, exc)
                    return self.chord_error_from_stack(
                        callback,
                        ChordError('Callback error: {0!r}'.format(exc)),
                    )
        except ChordError as exc:
            logger.exception('Chord %r raised: %r', request.group, exc)
            return self.chord_error_from_stack(callback, exc)
        except Exception as exc:  # pylint: disable=broad-except
            logger.exception('Chord %r raised: %r', request.group, exc)
            return self.chord_error_from_stack(
                callback,
                ChordError('Join error: {0!r}'.format(exc)),
            )

    def _create_client(self, **params):
        return self.redis.StrictRedis(
            connection_pool=self.ConnectionPool(**params),
        )

    @property
    def ConnectionPool(self):
        if self._ConnectionPool is None:
            self._ConnectionPool = self.redis.ConnectionPool
        return self._ConnectionPool

    @cached_property
    def client(self):
        return self._create_client(**self.connparams)

    def __reduce__(self, args=(), kwargs={}):
        return super(RedisBackend, self).__reduce__(
            (self.url,), {'expires': self.expires},
        )

    @deprecated.Property(4.0, 5.0)
    def host(self):
        return self.connparams['host']

    @deprecated.Property(4.0, 5.0)
    def port(self):
        return self.connparams['port']

    @deprecated.Property(4.0, 5.0)
    def db(self):
        return self.connparams['db']

    @deprecated.Property(4.0, 5.0)
    def password(self):
        return self.connparams['password']