amqp.py 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369
  1. # -*- coding: utf-8 -*-
  2. """
  3. celery.app.amqp
  4. ===============
  5. AMQ related functionality.
  6. :copyright: (c) 2009 - 2011 by Ask Solem.
  7. :license: BSD, see LICENSE for more details.
  8. """
  9. from datetime import datetime, timedelta
  10. from kombu import BrokerConnection, Exchange
  11. from kombu.connection import Resource
  12. from kombu import compat as messaging
  13. from kombu.utils import cached_property
  14. from celery import routes as _routes
  15. from celery import signals
  16. from celery.utils import gen_unique_id, textindent
  17. from celery.utils import promise, maybe_promise
  18. #: List of known options to a Kombu producers send method.
  19. #: Used to extract the message related options out of any `dict`.
  20. MSG_OPTIONS = ("mandatory", "priority", "immediate", "routing_key",
  21. "serializer", "delivery_mode", "compression")
  22. #: Human readable queue declaration.
  23. QUEUE_FORMAT = """
  24. . %(name)s exchange:%(exchange)s (%(exchange_type)s) \
  25. binding:%(binding_key)s
  26. """
  27. #: Set of exchange names that have already been declared.
  28. _exchanges_declared = set()
  29. #: Set of queue names that have already been declared.
  30. _queues_declared = set()
  31. def extract_msg_options(options, keep=MSG_OPTIONS):
  32. """Extracts known options to `basic_publish` from a dict,
  33. and returns a new dict."""
  34. return dict((name, options.get(name)) for name in keep)
  35. class Queues(dict):
  36. """Queue name⇒ declaration mapping.
  37. Celery will consult this mapping to find the options
  38. for any queue by name.
  39. :param queues: Initial mapping.
  40. """
  41. #: If set, this is a subset of queues to consume from.
  42. #: The rest of the queues are then used for routing only.
  43. _consume_from = None
  44. def __init__(self, queues):
  45. dict.__init__(self)
  46. for queue_name, options in (queues or {}).items():
  47. self.add(queue_name, **options)
  48. def add(self, queue, exchange=None, routing_key=None,
  49. exchange_type="direct", **options):
  50. """Add new queue.
  51. :param queue: Name of the queue.
  52. :keyword exchange: Name of the exchange.
  53. :keyword routing_key: Binding key.
  54. :keyword exchange_type: Type of exchange.
  55. :keyword \*\*options: Additional declaration options.
  56. """
  57. q = self[queue] = self.options(exchange, routing_key,
  58. exchange_type, **options)
  59. return q
  60. def options(self, exchange, routing_key,
  61. exchange_type="direct", **options):
  62. """Creates new option mapping for queue, with required
  63. keys present."""
  64. return dict(options, routing_key=routing_key,
  65. binding_key=routing_key,
  66. exchange=exchange,
  67. exchange_type=exchange_type)
  68. def format(self, indent=0, indent_first=True):
  69. """Format routing table into string for log dumps."""
  70. active = self.consume_from
  71. info = [QUEUE_FORMAT.strip() % dict(
  72. name=(name + ":").ljust(12), **config)
  73. for name, config in sorted(active.iteritems())]
  74. if indent_first:
  75. return textindent("\n".join(info), indent)
  76. return info[0] + "\n" + textindent("\n".join(info[1:]), indent)
  77. def select_subset(self, wanted, create_missing=True):
  78. """Select subset of the currently defined queues.
  79. Does not return anything: queues not in `wanted` will
  80. be discarded in-place.
  81. :param wanted: List of wanted queue names.
  82. :keyword create_missing: By default any unknown queues will be
  83. added automatically, but if disabled
  84. the occurrence of unknown queues
  85. in `wanted` will raise :exc:`KeyError`.
  86. """
  87. acc = {}
  88. for queue in wanted:
  89. try:
  90. options = self[queue]
  91. except KeyError:
  92. if not create_missing:
  93. raise
  94. options = self.options(queue, queue)
  95. acc[queue] = options
  96. self._consume_from = acc
  97. self.update(acc)
  98. @property
  99. def consume_from(self):
  100. if self._consume_from is not None:
  101. return self._consume_from
  102. return self
  103. @classmethod
  104. def with_defaults(cls, queues, default_exchange, default_exchange_type):
  105. """Alternate constructor that adds default exchange and
  106. exchange type information to queues that does not have any."""
  107. for opts in queues.values():
  108. opts.setdefault("exchange", default_exchange),
  109. opts.setdefault("exchange_type", default_exchange_type)
  110. opts.setdefault("binding_key", default_exchange)
  111. opts.setdefault("routing_key", opts.get("binding_key"))
  112. return cls(queues)
  113. class TaskPublisher(messaging.Publisher):
  114. auto_declare = True
  115. retry = False
  116. retry_policy = None
  117. def __init__(self, *args, **kwargs):
  118. self.app = kwargs.pop("app")
  119. self.retry = kwargs.pop("retry", self.retry)
  120. self.retry_policy = kwargs.pop("retry_policy",
  121. self.retry_policy or {})
  122. super(TaskPublisher, self).__init__(*args, **kwargs)
  123. def declare(self):
  124. if self.exchange.name and \
  125. self.exchange.name not in _exchanges_declared:
  126. super(TaskPublisher, self).declare()
  127. _exchanges_declared.add(self.exchange.name)
  128. def _declare_queue(self, name, retry=False, retry_policy={}):
  129. options = self.app.queues[name]
  130. queue = messaging.entry_to_queue(name, **options)(self.channel)
  131. if retry:
  132. self.connection.ensure(queue, queue.declare, **retry_policy)()
  133. else:
  134. queue.declare()
  135. return queue
  136. def _declare_exchange(self, name, type, retry=False, retry_policy={}):
  137. ex = Exchange(name, type=type, durable=self.durable,
  138. auto_delete=self.auto_delete)(self.channel)
  139. if retry:
  140. return self.connection.ensure(ex, ex.declare, **retry_policy)
  141. return ex.declare()
  142. def delay_task(self, task_name, task_args=None, task_kwargs=None,
  143. countdown=None, eta=None, task_id=None, taskset_id=None,
  144. expires=None, exchange=None, exchange_type=None,
  145. event_dispatcher=None, retry=None, retry_policy=None,
  146. queue=None, now=None, retries=0, chord=None, **kwargs):
  147. """Send task message."""
  148. connection = self.connection
  149. _retry_policy = self.retry_policy
  150. if retry_policy: # merge default and custom policy
  151. _retry_policy = dict(_retry_policy, **retry_policy)
  152. # declare entities
  153. if queue and queue not in _queues_declared:
  154. entity = self._declare_queue(queue, retry, _retry_policy)
  155. _exchanges_declared.add(entity.exchange.name)
  156. _queues_declared.add(entity.name)
  157. if exchange and exchange not in _exchanges_declared:
  158. self._declare_exchange(exchange,
  159. exchange_type or self.exchange_type, retry, _retry_policy)
  160. _exchanges_declared.add(exchange)
  161. task_id = task_id or gen_unique_id()
  162. task_args = task_args or []
  163. task_kwargs = task_kwargs or {}
  164. if not isinstance(task_args, (list, tuple)):
  165. raise ValueError("task args must be a list or tuple")
  166. if not isinstance(task_kwargs, dict):
  167. raise ValueError("task kwargs must be a dictionary")
  168. if countdown: # Convert countdown to ETA.
  169. now = now or datetime.now()
  170. eta = now + timedelta(seconds=countdown)
  171. if isinstance(expires, int):
  172. now = now or datetime.now()
  173. expires = now + timedelta(seconds=expires)
  174. eta = eta and eta.isoformat()
  175. expires = expires and expires.isoformat()
  176. body = {"task": task_name,
  177. "id": task_id,
  178. "args": task_args or [],
  179. "kwargs": task_kwargs or {},
  180. "retries": retries or 0,
  181. "eta": eta,
  182. "expires": expires}
  183. if taskset_id:
  184. body["taskset"] = taskset_id
  185. if chord:
  186. body["chord"] = chord
  187. send = self.send
  188. if retry is None and self.retry or retry:
  189. send = connection.ensure(self, self.send, **_retry_policy)
  190. send(body, exchange=exchange, **extract_msg_options(kwargs))
  191. signals.task_sent.send(sender=task_name, **body)
  192. if event_dispatcher:
  193. event_dispatcher.send("task-sent", uuid=task_id,
  194. name=task_name,
  195. args=repr(task_args),
  196. kwargs=repr(task_kwargs),
  197. retries=retries,
  198. eta=eta,
  199. expires=expires)
  200. return task_id
  201. def __exit__(self, *exc_info):
  202. try:
  203. self.release()
  204. except AttributeError:
  205. self.close()
class PublisherPool(Resource):
    """Pool of lazily-created :class:`TaskPublisher` instances.

    The pool limit mirrors the limit of the app's broker
    connection pool (``app.pool``).

    """

    def __init__(self, app=None):
        self.app = app
        super(PublisherPool, self).__init__(limit=self.app.pool.limit)

    def create_publisher(self):
        """Acquire a connection from the app's connection pool and
        bind a new publisher to it (``auto_declare`` disabled)."""
        conn = self.app.pool.acquire(block=True)
        pub = self.app.amqp.TaskPublisher(conn, auto_declare=False)
        # Remember the channel on the connection so prepare() can
        # revive a publisher with it later.
        conn._publisher_chan = pub.channel
        return pub

    def new(self):
        """Return a promise creating the publisher on first use."""
        return promise(self.create_publisher)

    def setup(self):
        """Pre-fill the pool with `limit` publisher promises."""
        if self.limit:
            for _ in xrange(self.limit):
                self._resource.put_nowait(self.new())

    def prepare(self, publisher):
        """Evaluate a pooled publisher (promise) and make sure it has
        a live connection and channel before handing it out."""
        pub = maybe_promise(publisher)
        if not pub.connection:
            # Connection was released back in release(); acquire a
            # fresh one and revive the publisher's channel.
            pub.connection = self.app.pool.acquire(block=True)
            if not getattr(pub.connection, "_publisher_chan", None):
                pub.connection._publisher_chan = pub.connection.channel()
            pub.revive(pub.connection._publisher_chan)
        return pub

    def release(self, resource):
        # Give the connection back to the connection pool and detach
        # it, then return the publisher itself to this pool.
        resource.connection.release()
        resource.connection = None
        super(PublisherPool, self).release(resource)
  233. class AMQP(object):
  234. BrokerConnection = BrokerConnection
  235. Publisher = messaging.Publisher
  236. Consumer = messaging.Consumer
  237. ConsumerSet = messaging.ConsumerSet
  238. #: Cached and prepared routing table.
  239. _rtable = None
  240. def __init__(self, app):
  241. self.app = app
  242. def flush_routes(self):
  243. self._rtable = _routes.prepare(self.app.conf.CELERY_ROUTES)
  244. def Queues(self, queues):
  245. """Create new :class:`Queues` instance, using queue defaults
  246. from the current configuration."""
  247. conf = self.app.conf
  248. if not queues:
  249. queues = {conf.CELERY_DEFAULT_QUEUE: {
  250. "exchange": conf.CELERY_DEFAULT_EXCHANGE,
  251. "exchange_type": conf.CELERY_DEFAULT_EXCHANGE_TYPE,
  252. "binding_key": conf.CELERY_DEFAULT_ROUTING_KEY}}
  253. return Queues.with_defaults(queues, conf.CELERY_DEFAULT_EXCHANGE,
  254. conf.CELERY_DEFAULT_EXCHANGE_TYPE)
  255. def Router(self, queues=None, create_missing=None):
  256. """Returns the current task router."""
  257. return _routes.Router(self.routes, queues or self.queues,
  258. self.app.either("CELERY_CREATE_MISSING_QUEUES",
  259. create_missing), app=self.app)
  260. def TaskConsumer(self, *args, **kwargs):
  261. """Returns consumer for a single task queue."""
  262. default_queue_name, default_queue = self.get_default_queue()
  263. defaults = dict({"queue": default_queue_name}, **default_queue)
  264. defaults["routing_key"] = defaults.pop("binding_key", None)
  265. return self.Consumer(*args,
  266. **self.app.merge(defaults, kwargs))
  267. def TaskPublisher(self, *args, **kwargs):
  268. """Returns publisher used to send tasks.
  269. You should use `app.send_task` instead.
  270. """
  271. conf = self.app.conf
  272. _, default_queue = self.get_default_queue()
  273. defaults = {"exchange": default_queue["exchange"],
  274. "exchange_type": default_queue["exchange_type"],
  275. "routing_key": conf.CELERY_DEFAULT_ROUTING_KEY,
  276. "serializer": conf.CELERY_TASK_SERIALIZER,
  277. "retry": conf.CELERY_TASK_PUBLISH_RETRY,
  278. "retry_policy": conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
  279. "app": self}
  280. return TaskPublisher(*args, **self.app.merge(defaults, kwargs))
  281. def get_task_consumer(self, connection, queues=None, **kwargs):
  282. """Return consumer configured to consume from all known task
  283. queues."""
  284. return self.ConsumerSet(connection,
  285. from_dict=queues or self.queues.consume_from,
  286. **kwargs)
  287. def get_default_queue(self):
  288. """Returns `(queue_name, queue_options)` tuple for the queue
  289. configured to be default (:setting:`CELERY_DEFAULT_QUEUE`)."""
  290. q = self.app.conf.CELERY_DEFAULT_QUEUE
  291. return q, self.queues[q]
  292. @cached_property
  293. def queues(self):
  294. """Queue name⇒ declaration mapping."""
  295. return self.Queues(self.app.conf.CELERY_QUEUES)
  296. @property
  297. def routes(self):
  298. if self._rtable is None:
  299. self.flush_routes()
  300. return self._rtable
  301. @cached_property
  302. def publisher_pool(self):
  303. return PublisherPool(app=self.app)