# -*- coding: utf-8 -*-
"""
celery.app.amqp
~~~~~~~~~~~~~~~

AMQP related functionality.

:copyright: (c) 2009 - 2011 by Ask Solem.
:license: BSD, see LICENSE for more details.

"""
from __future__ import absolute_import

from datetime import datetime, timedelta

from kombu import BrokerConnection, Exchange
from kombu import compat as messaging
from kombu.pools import ProducerPool

from .. import routes as _routes
from .. import signals
from ..utils import cached_property, textindent, uuid

# UTC timezone mark.
# Defaults to local in 2.5, and UTC in 3.x.
TZ_LOCAL = 0x0
TZ_UTC = 0x1

#: List of known options to a Kombu producer's send method.
#: Used to extract the message related options out of any `dict`.
MSG_OPTIONS = ("mandatory", "priority", "immediate", "routing_key",
               "serializer", "delivery_mode", "compression")

#: Human readable queue declaration.
QUEUE_FORMAT = """
. %(name)s exchange:%(exchange)s (%(exchange_type)s) \
binding:%(binding_key)s
"""

#: Set of exchange names that have already been declared.
_exchanges_declared = set()

#: Set of queue names that have already been declared.
_queues_declared = set()


def extract_msg_options(options, keep=MSG_OPTIONS):
    """Extracts known options to `basic_publish` from a dict,
    and returns a new dict."""
    return dict((name, options.get(name)) for name in keep)
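# Illustrative sketch (not part of the original module): only the keys listed
# in MSG_OPTIONS survive extraction; missing keys default to None.
#
#     opts = extract_msg_options({"routing_key": "video", "priority": 9,
#                                 "queue": "video"})
#     # opts["routing_key"] == "video", opts["priority"] == 9,
#     # opts["serializer"] is None, and "queue" is dropped.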

class Queues(dict):
    """Queue name ⇒ declaration mapping.

    Celery will consult this mapping to find the options
    for any queue by name.

    :param queues: Initial mapping.

    """
    #: If set, this is a subset of queues to consume from.
    #: The rest of the queues are then used for routing only.
    _consume_from = None

    def __init__(self, queues):
        dict.__init__(self)
        for queue_name, options in (queues or {}).items():
            self.add(queue_name, **options)

    def add(self, queue, exchange=None, routing_key=None,
            exchange_type="direct", **options):
        """Add new queue.

        :param queue: Name of the queue.
        :keyword exchange: Name of the exchange.
        :keyword routing_key: Binding key.
        :keyword exchange_type: Type of exchange.
        :keyword \*\*options: Additional declaration options.

        """
        q = self[queue] = self.options(exchange, routing_key,
                                       exchange_type, **options)
        return q

    def options(self, exchange, routing_key,
                exchange_type="direct", **options):
        """Creates new option mapping for queue, with required
        keys present."""
        return dict(options, routing_key=routing_key,
                             binding_key=routing_key,
                             exchange=exchange,
                             exchange_type=exchange_type)

    def format(self, indent=0, indent_first=True):
        """Format routing table into string for log dumps."""
        active = self.consume_from
        if not active:
            return ""
        info = [QUEUE_FORMAT.strip() % dict(
                    name=(name + ":").ljust(12), **config)
                        for name, config in sorted(active.iteritems())]
        if indent_first:
            return textindent("\n".join(info), indent)
        return info[0] + "\n" + textindent("\n".join(info[1:]), indent)
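    # Example of the rendered table (illustrative), for a single "celery"
    # queue bound to the "celery" direct exchange:
    #
    #     . celery:      exchange:celery (direct) binding:celery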
    def select_subset(self, wanted, create_missing=True):
        """Select subset of the currently defined queues to consume from.

        Does not return anything: the queues in `wanted` become the
        active :attr:`consume_from` set, while the remaining queues are
        kept in the mapping for routing only.

        :param wanted: List of wanted queue names.
        :keyword create_missing: By default any unknown queues will be
                                 added automatically, but if disabled
                                 the occurrence of unknown queues
                                 in `wanted` will raise :exc:`KeyError`.

        """
        acc = {}
        for queue in wanted:
            try:
                options = self[queue]
            except KeyError:
                if not create_missing:
                    raise
                options = self.options(queue, queue)
            acc[queue] = options
        self._consume_from = acc
        self.update(acc)

    @property
    def consume_from(self):
        if self._consume_from is not None:
            return self._consume_from
        return self
    @classmethod
    def with_defaults(cls, queues, default_exchange, default_exchange_type):
        """Alternate constructor that adds default exchange and
        exchange type information to queues that do not have any."""
        if queues is None:
            queues = {}
        for opts in queues.values():
            opts.setdefault("exchange", default_exchange)
            opts.setdefault("exchange_type", default_exchange_type)
            opts.setdefault("binding_key", default_exchange)
            opts.setdefault("routing_key", opts.get("binding_key"))
        return cls(queues)
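# Usage sketch (illustrative, not part of the original module):
#
#     queues = Queues({"default": {"exchange": "default",
#                                  "routing_key": "default"}})
#     queues.add("video", exchange="media", routing_key="media.video")
#     queues.select_subset(["video"])
#     # queues.consume_from now only contains "video"; "default" stays in
#     # the mapping and is still available for routing.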

class TaskPublisher(messaging.Publisher):
    auto_declare = True
    retry = False
    retry_policy = None

    def __init__(self, *args, **kwargs):
        self.app = kwargs.pop("app")
        self.retry = kwargs.pop("retry", self.retry)
        self.retry_policy = kwargs.pop("retry_policy",
                                        self.retry_policy or {})
        super(TaskPublisher, self).__init__(*args, **kwargs)

    def declare(self):
        if self.exchange.name and \
                self.exchange.name not in _exchanges_declared:
            super(TaskPublisher, self).declare()
            _exchanges_declared.add(self.exchange.name)
    def _declare_queue(self, name, retry=False, retry_policy={}):
        options = self.app.queues[name]
        queue = messaging.entry_to_queue(name, **options)(self.channel)
        if retry:
            self.connection.ensure(queue, queue.declare, **retry_policy)()
        else:
            queue.declare()
        return queue

    def _declare_exchange(self, name, type, retry=False, retry_policy={}):
        ex = Exchange(name, type=type, durable=self.durable,
                      auto_delete=self.auto_delete)(self.channel)
        if retry:
            # `ensure` returns a wrapper that must be called to perform the
            # actual declaration, as done in _declare_queue above.
            return self.connection.ensure(ex, ex.declare, **retry_policy)()
        return ex.declare()
    def delay_task(self, task_name, task_args=None, task_kwargs=None,
                   countdown=None, eta=None, task_id=None, taskset_id=None,
                   expires=None, exchange=None, exchange_type=None,
                   event_dispatcher=None, retry=None, retry_policy=None,
                   queue=None, now=None, retries=0, chord=None, **kwargs):
        """Send task message."""
        connection = self.connection
        _retry_policy = self.retry_policy
        if retry_policy:  # merge default and custom policy
            _retry_policy = dict(_retry_policy, **retry_policy)

        # declare entities
        if queue and queue not in _queues_declared:
            entity = self._declare_queue(queue, retry, _retry_policy)
            _exchanges_declared.add(entity.exchange.name)
            _queues_declared.add(entity.name)
        if exchange and exchange not in _exchanges_declared:
            self._declare_exchange(exchange,
                    exchange_type or self.exchange_type, retry, _retry_policy)
            _exchanges_declared.add(exchange)

        task_id = task_id or uuid()
        task_args = task_args or []
        task_kwargs = task_kwargs or {}
        if not isinstance(task_args, (list, tuple)):
            raise ValueError("task args must be a list or tuple")
        if not isinstance(task_kwargs, dict):
            raise ValueError("task kwargs must be a dictionary")
        if countdown:                           # Convert countdown to ETA.
            now = now or datetime.utcnow()
            eta = now + timedelta(seconds=countdown)
        if isinstance(expires, int):
            now = now or datetime.utcnow()
            expires = now + timedelta(seconds=expires)
        eta = eta and eta.isoformat()
        expires = expires and expires.isoformat()

        body = {"task": task_name,
                "id": task_id,
                "args": task_args or [],
                "kwargs": task_kwargs or {},
                "retries": retries or 0,
                "eta": eta,
                "expires": expires,
                "tz": TZ_LOCAL}
        if taskset_id:
            body["taskset"] = taskset_id
        if chord:
            body["chord"] = chord

        do_retry = retry if retry is not None else self.retry
        send = self.send
        if do_retry:
            send = connection.ensure(self, self.send, **_retry_policy)
        send(body, exchange=exchange, **extract_msg_options(kwargs))
        signals.task_sent.send(sender=task_name, **body)
        if event_dispatcher:
            event_dispatcher.send("task-sent", uuid=task_id,
                                               name=task_name,
                                               args=repr(task_args),
                                               kwargs=repr(task_kwargs),
                                               retries=retries,
                                               eta=eta,
                                               expires=expires)
        return task_id

    def __exit__(self, *exc_info):
        try:
            self.release()
        except AttributeError:
            self.close()
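# Usage sketch (illustrative; assumes a configured app instance `app` and an
# open broker connection `connection`; "tasks.add" is a hypothetical task):
#
#     publisher = app.amqp.TaskPublisher(connection)
#     try:
#         task_id = publisher.delay_task("tasks.add", task_args=(2, 2),
#                                        countdown=10)
#     finally:
#         publisher.close()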

class PublisherPool(ProducerPool):

    def __init__(self, app):
        self.app = app
        super(PublisherPool, self).__init__(self.app.pool,
                                            limit=self.app.pool.limit)

    def create_producer(self):
        conn = self.connections.acquire(block=True)
        pub = self.app.amqp.TaskPublisher(conn, auto_declare=False)
        conn._producer_chan = pub.channel
        return pub
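# Usage sketch (illustrative; relies on the kombu ProducerPool interface,
# where acquired producers gain a `release()` method):
#
#     pool = PublisherPool(app)
#     publisher = pool.acquire(block=True)
#     try:
#         publisher.delay_task("tasks.add")
#     finally:
#         publisher.release()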

class AMQP(object):
    BrokerConnection = BrokerConnection
    Publisher = messaging.Publisher
    Consumer = messaging.Consumer
    ConsumerSet = messaging.ConsumerSet

    #: Cached and prepared routing table.
    _rtable = None

    def __init__(self, app):
        self.app = app

    def flush_routes(self):
        self._rtable = _routes.prepare(self.app.conf.CELERY_ROUTES)

    def Queues(self, queues):
        """Create new :class:`Queues` instance, using queue defaults
        from the current configuration."""
        conf = self.app.conf
        if not queues and conf.CELERY_DEFAULT_QUEUE:
            queues = {conf.CELERY_DEFAULT_QUEUE: {
                        "exchange": conf.CELERY_DEFAULT_EXCHANGE,
                        "exchange_type": conf.CELERY_DEFAULT_EXCHANGE_TYPE,
                        "binding_key": conf.CELERY_DEFAULT_ROUTING_KEY}}
        return Queues.with_defaults(queues, conf.CELERY_DEFAULT_EXCHANGE,
                                            conf.CELERY_DEFAULT_EXCHANGE_TYPE)

    def Router(self, queues=None, create_missing=None):
        """Returns the current task router."""
        return _routes.Router(self.routes, queues or self.queues,
                              self.app.either("CELERY_CREATE_MISSING_QUEUES",
                                              create_missing), app=self.app)
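    # Usage sketch (illustrative; assumes a "video.compress" task routed via
    # the CELERY_ROUTES setting, and that the router returned here exposes a
    # `route(options, task_name)` method as in celery.routes):
    #
    #     router = app.amqp.Router()
    #     options = router.route({}, "video.compress")
    #     # `options` now holds the resolved queue/exchange/routing_key.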
    def TaskConsumer(self, *args, **kwargs):
        """Returns consumer for a single task queue."""
        default_queue_name, default_queue = self.get_default_queue()
        defaults = dict({"queue": default_queue_name}, **default_queue)
        defaults["routing_key"] = defaults.pop("binding_key", None)
        return self.Consumer(*args,
                             **self.app.merge(defaults, kwargs))

    def TaskPublisher(self, *args, **kwargs):
        """Returns publisher used to send tasks.

        You should use `app.send_task` instead.

        """
        conf = self.app.conf
        _, default_queue = self.get_default_queue()
        defaults = {"exchange": default_queue["exchange"],
                    "exchange_type": default_queue["exchange_type"],
                    "routing_key": conf.CELERY_DEFAULT_ROUTING_KEY,
                    "serializer": conf.CELERY_TASK_SERIALIZER,
                    "retry": conf.CELERY_TASK_PUBLISH_RETRY,
                    "retry_policy": conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
                    "app": self}
        return TaskPublisher(*args, **self.app.merge(defaults, kwargs))

    def get_task_consumer(self, connection, queues=None, **kwargs):
        """Return consumer configured to consume from all known task
        queues."""
        return self.ConsumerSet(connection,
                                from_dict=queues or self.queues.consume_from,
                                **kwargs)

    def get_default_queue(self):
        """Returns `(queue_name, queue_options)` tuple for the queue
        configured to be default (:setting:`CELERY_DEFAULT_QUEUE`)."""
        q = self.app.conf.CELERY_DEFAULT_QUEUE
        return q, self.queues[q]

    @cached_property
    def queues(self):
        """Queue name ⇒ declaration mapping."""
        return self.Queues(self.app.conf.CELERY_QUEUES)

    @property
    def routes(self):
        if self._rtable is None:
            self.flush_routes()
        return self._rtable

    @cached_property
    def publisher_pool(self):
        return PublisherPool(self.app)
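
# Usage sketch (illustrative; assumes an already configured app instance
# `app` whose `amqp` attribute is an instance of the AMQP class above):
#
#     print(app.amqp.queues.format())       # dump the routing table
#     connection = app.broker_connection()
#     consumer = app.amqp.get_task_consumer(connection)
#     # ... consume messages, then clean up:
#     consumer.close()
#     connection.close()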