# -*- coding: utf-8 -*-
"""
    celery.events
    ~~~~~~~~~~~~~

    Events is a stream of messages sent for certain actions occurring
    in the worker (and clients if :setting:`CELERY_SEND_TASK_SENT_EVENT`
    is enabled), used for monitoring purposes.

"""
from __future__ import absolute_import
from __future__ import with_statement

import time
import socket
import threading

from collections import deque
from contextlib import contextmanager
from copy import copy

from kombu import eventloop, Exchange, Queue, Consumer, Producer
from kombu.utils import cached_property

from celery.app import app_or_default
from celery.utils import uuid

event_exchange = Exchange('celeryev', type='topic')


def get_exchange(conn):
    ex = copy(event_exchange)
    if conn.transport.driver_type == 'redis':
        # quick hack for Issue #436
        ex.type = 'fanout'
    return ex


def Event(type, _fields=None, **fields):
    """Create an event.

    An event is a dictionary, the only required field is ``type``.

    """
    event = dict(_fields or {}, type=type, **fields)
    if 'timestamp' not in event:
        event['timestamp'] = time.time()
    return event
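

# --- Example (illustrative sketch, not part of the original module) --------
# ``Event`` only builds a plain dict: the single required key is ``type``,
# and a ``timestamp`` is filled in when one is not supplied.  The event type
# and field names below are made up for illustration.

def _example_event():
    ev = Event('task-custom', uuid='some-task-id', info='hello')
    assert ev['type'] == 'task-custom'
    assert 'timestamp' in ev
    return ev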


class EventDispatcher(object):
    """Send events as messages.

    :param connection: Connection to the broker.

    :keyword hostname: Hostname to identify ourselves as,
        by default uses the hostname returned by :func:`socket.gethostname`.

    :keyword enabled: Set to :const:`False` to not actually publish any
        events, making :meth:`send` a noop operation.

    :keyword channel: Can be used instead of `connection` to specify
        an exact channel to use when sending events.

    :keyword buffer_while_offline: If enabled events will be buffered
        while the connection is down.  :meth:`flush` must be called
        as soon as the connection is re-established.

    You need to :meth:`close` this after use.

    """
    DISABLED_TRANSPORTS = set(['sql'])

    #: bound to the app when the class is created via ``subclass_with_self``
    #: (see ``Events.Dispatcher`` below).
    app = None

    def __init__(self, connection=None, hostname=None, enabled=True,
                 channel=None, buffer_while_offline=True, app=None,
                 serializer=None):
        self.app = app_or_default(app or self.app)
        self.connection = connection
        self.channel = channel
        self.hostname = hostname or socket.gethostname()
        self.buffer_while_offline = buffer_while_offline
        self.mutex = threading.Lock()
        self.producer = None
        self._outbound_buffer = deque()
        self.serializer = serializer or self.app.conf.CELERY_EVENT_SERIALIZER
        self.on_enabled = set()
        self.on_disabled = set()

        self.enabled = enabled
        if not connection and channel:
            self.connection = channel.connection.client
        conninfo = self.connection or self.app.connection()
        self.exchange = get_exchange(conninfo)
        if conninfo.transport.driver_type in self.DISABLED_TRANSPORTS:
            self.enabled = False
        if self.enabled:
            self.enable()

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.close()

    def enable(self):
        self.producer = Producer(self.channel or self.connection,
                                 exchange=self.exchange,
                                 serializer=self.serializer)
        self.enabled = True
        for callback in self.on_enabled:
            callback()

    def disable(self):
        if self.enabled:
            self.enabled = False
            self.close()
            for callback in self.on_disabled:
                callback()

    def publish(self, type, fields, producer, retry=False, retry_policy=None):
        with self.mutex:
            event = Event(type, hostname=self.hostname,
                          clock=self.app.clock.forward(), **fields)
            exchange = self.exchange
            producer.publish(
                event,
                routing_key=type.replace('-', '.'),
                exchange=exchange.name,
                retry=retry,
                retry_policy=retry_policy,
                declare=[exchange],
                serializer=self.serializer,
            )

    def send(self, type, **fields):
        """Send event.

        :param type: Kind of event.
        :keyword \*\*fields: Event arguments.

        """
        if self.enabled:
            try:
                self.publish(type, fields, self.producer)
            except Exception, exc:
                if not self.buffer_while_offline:
                    raise
                self._outbound_buffer.append((type, fields, exc))

    def flush(self):
        """Resend events buffered while the connection was down."""
        while self._outbound_buffer:
            try:
                type, fields, _ = self._outbound_buffer.popleft()
            except IndexError:
                return
            self.send(type, **fields)

    def copy_buffer(self, other):
        self._outbound_buffer = other._outbound_buffer

    def close(self):
        """Close the event dispatcher."""
        # release the mutex if it is currently held, then drop the producer.
        self.mutex.locked() and self.mutex.release()
        self.producer = None

    def _get_publisher(self):
        return self.producer

    def _set_publisher(self, producer):
        self.producer = producer
    publisher = property(_get_publisher, _set_publisher)  # XXX compat
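

# --- Example (illustrative sketch, not part of the original module) --------
# Typical use of ``EventDispatcher``: open a broker connection, send an
# event, flush anything that was buffered while the connection was down,
# and let the context manager close the dispatcher.  The ``app`` argument is
# assumed to be a Celery app instance; the event type and fields are made up.

def _example_dispatch(app):
    with app.connection() as connection:
        with EventDispatcher(connection, app=app) as dispatcher:
            dispatcher.send('worker-custom', info='hello')
            # events that failed to send were buffered (buffer_while_offline);
            # resend them once the connection is known to be up again:
            dispatcher.flush()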


class EventReceiver(object):
    """Capture events.

    :param connection: Connection to the broker.
    :keyword handlers: Event handlers.

    :attr:`handlers` is a dict of event types and their handlers,
    the special handler `"*"` captures all events that don't have a
    handler.

    """
    handlers = {}

    def __init__(self, connection, handlers=None, routing_key='#',
                 node_id=None, app=None, queue_prefix='celeryev'):
        self.app = app_or_default(app)
        self.connection = connection
        if handlers is not None:
            self.handlers = handlers
        self.routing_key = routing_key
        self.node_id = node_id or uuid()
        self.queue_prefix = queue_prefix
        self.exchange = get_exchange(self.connection or self.app.connection())
        self.queue = Queue('.'.join([self.queue_prefix, self.node_id]),
                           exchange=self.exchange,
                           routing_key=self.routing_key,
                           auto_delete=True,
                           durable=False)

    def process(self, type, event):
        """Process the received event by dispatching it to the appropriate
        handler."""
        handler = self.handlers.get(type) or self.handlers.get('*')
        handler and handler(event)

    @contextmanager
    def consumer(self, wakeup=True):
        """Create event consumer."""
        consumer = Consumer(self.connection,
                            queues=[self.queue], no_ack=True,
                            accept=['application/json'])
        consumer.register_callback(self._receive)
        consumer.consume()
        try:
            if wakeup:
                self.wakeup_workers(channel=consumer.channel)
            yield consumer
        finally:
            try:
                consumer.cancel()
            except self.connection.connection_errors:
                pass

    def itercapture(self, limit=None, timeout=None, wakeup=True):
        with self.consumer(wakeup=wakeup) as consumer:
            yield consumer
            self.drain_events(limit=limit, timeout=timeout)

    def capture(self, limit=None, timeout=None, wakeup=True):
        """Open up a consumer capturing events.

        This has to run in the main process, and it will never
        stop unless forced via :exc:`KeyboardInterrupt` or :exc:`SystemExit`.

        """
        list(self.itercapture(limit=limit, timeout=timeout, wakeup=wakeup))

    def wakeup_workers(self, channel=None):
        self.app.control.broadcast('heartbeat',
                                   connection=self.connection,
                                   channel=channel)

    def drain_events(self, **kwargs):
        for _ in eventloop(self.connection, **kwargs):
            pass

    def _receive(self, body, message):
        type = body.pop('type').lower()
        clock = body.get('clock')
        if clock:
            self.app.clock.adjust(clock)
        self.process(type, Event(type, body))
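

# --- Example (illustrative sketch, not part of the original module) --------
# Consuming events with ``EventReceiver``: ``handlers`` maps event types to
# callbacks, and the special key '*' catches any type without a dedicated
# handler.  ``capture()`` blocks, so real monitors either pass ``limit`` /
# ``timeout`` or run it in a dedicated process.  The handler bodies below
# are made up.

def _example_receive(app):
    def on_task_succeeded(event):
        print('task succeeded: %r' % (event,))

    def catchall(event):
        print('got event of type %r' % (event['type'],))

    with app.connection() as connection:
        recv = EventReceiver(connection,
                             handlers={'task-succeeded': on_task_succeeded,
                                       '*': catchall},
                             app=app)
        recv.capture(limit=10)  # return after ten events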


class Events(object):

    def __init__(self, app=None):
        self.app = app

    @cached_property
    def Receiver(self):
        return self.app.subclass_with_self(EventReceiver,
                                           reverse='events.Receiver')

    @cached_property
    def Dispatcher(self):
        return self.app.subclass_with_self(EventDispatcher,
                                           reverse='events.Dispatcher')

    @cached_property
    def State(self):
        return self.app.subclass_with_self('celery.events.state:State',
                                           reverse='events.State')

    @contextmanager
    def default_dispatcher(self, hostname=None, enabled=True,
                           buffer_while_offline=False):
        with self.app.amqp.producer_pool.acquire(block=True) as pub:
            with self.Dispatcher(pub.connection, hostname, enabled,
                                 pub.channel, buffer_while_offline) as d:
                yield d
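

# --- Example (illustrative sketch, not part of the original module) --------
# ``Events`` is normally reached as ``app.events``; ``default_dispatcher``
# borrows a producer from the app's producer pool, so short-lived code does
# not have to manage its own connection.  The event type below is made up.

def _example_default_dispatcher(app):
    events = Events(app)  # normally obtained as ``app.events``
    with events.default_dispatcher() as dispatcher:
        dispatcher.send('worker-custom', info='hello')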