# -*- coding: utf-8 -*-
  1. """
  2. Jobs Executable by the Worker Server.
  3. """
  4. from celery.registry import tasks
  5. from celery.exceptions import NotRegistered
  6. from celery.execute import ExecuteWrapper
  7. from celery.utils import noop, fun_takes_kwargs
  8. from django.core.mail import mail_admins
  9. import multiprocessing
  10. import socket
  11. import sys
# pep8.py borks on a inline signature separator and
# says "trailing whitespace" ;)
# Conventional e-mail signature delimiter ("-- " incl. trailing space).
EMAIL_SIGNATURE_SEP = "-- "

# Body template for task-failure e-mails sent to the admins.
# NOTE: the "%%(...)s" placeholders are doubled so they survive the
# immediate %-interpolation below (which only fills in
# EMAIL_SIGNATURE_SEP); they become single "%(...)s" placeholders that
# TaskWrapper.on_failure later fills with the task's error context.
TASK_FAIL_EMAIL_BODY = """
Task %%(name)s with id %%(id)s raised exception: %%(exc)s
Task was called with args:%%(args)s kwargs:%%(kwargs)s.
The contents of the full traceback was:
%%(traceback)s
%(EMAIL_SIGNATURE_SEP)s
Just thought I'd let you know!
celeryd at %%(hostname)s.
""" % {"EMAIL_SIGNATURE_SEP": EMAIL_SIGNATURE_SEP}
  24. class TaskWrapper(object):
  25. """Class wrapping a task to be run.
  26. :param task_name: see :attr:`task_name`.
  27. :param task_id: see :attr:`task_id`.
  28. :param task_func: see :attr:`task_func`
  29. :param args: see :attr:`args`
  30. :param kwargs: see :attr:`kwargs`.
  31. .. attribute:: task_name
  32. Kind of task. Must be a name registered in the task registry.
  33. .. attribute:: task_id
  34. UUID of the task.
  35. .. attribute:: task_func
  36. The tasks callable object.
  37. .. attribute:: args
  38. List of positional arguments to apply to the task.
  39. .. attribute:: kwargs
  40. Mapping of keyword arguments to apply to the task.
  41. .. attribute:: message
  42. The original message sent. Used for acknowledging the message.
  43. """
  44. success_msg = "Task %(name)s[%(id)s] processed: %(return_value)s"
  45. fail_msg = """
  46. Task %(name)s[%(id)s] raised exception: %(exc)s\n%(traceback)s
  47. """
  48. fail_email_subject = """
  49. [celery@%(hostname)s] Error: Task %(name)s (%(id)s): %(exc)s
  50. """
  51. fail_email_body = TASK_FAIL_EMAIL_BODY
  52. def __init__(self, task_name, task_id, task_func, args, kwargs,
  53. on_ack=noop, retries=0, **opts):
  54. self.task_name = task_name
  55. self.task_id = task_id
  56. self.task_func = task_func
  57. self.retries = retries
  58. self.args = args
  59. self.kwargs = kwargs
  60. self.logger = kwargs.get("logger")
  61. self.on_ack = on_ack
  62. for opt in ("success_msg", "fail_msg", "fail_email_subject",
  63. "fail_email_body"):
  64. setattr(self, opt, opts.get(opt, getattr(self, opt, None)))
  65. if not self.logger:
  66. self.logger = multiprocessing.get_logger()
  67. def __repr__(self):
  68. return '<%s: {name:"%s", id:"%s", args:"%s", kwargs:"%s"}>' % (
  69. self.__class__.__name__,
  70. self.task_name, self.task_id,
  71. self.args, self.kwargs)
  72. @classmethod
  73. def from_message(cls, message, message_data, logger=None):
  74. """Create a :class:`TaskWrapper` from a task message sent by
  75. :class:`celery.messaging.TaskPublisher`.
  76. :raises UnknownTaskError: if the message does not describe a task,
  77. the message is also rejected.
  78. :returns: :class:`TaskWrapper` instance.
  79. """
  80. task_name = message_data["task"]
  81. task_id = message_data["id"]
  82. args = message_data["args"]
  83. kwargs = message_data["kwargs"]
  84. retries = message_data.get("retries", 0)
  85. # Convert any unicode keys in the keyword arguments to ascii.
  86. kwargs = dict((key.encode("utf-8"), value)
  87. for key, value in kwargs.items())
  88. if task_name not in tasks:
  89. raise NotRegistered(task_name)
  90. task_func = tasks[task_name]
  91. return cls(task_name, task_id, task_func, args, kwargs,
  92. retries=retries, on_ack=message.ack, logger=logger)
  93. def extend_with_default_kwargs(self, loglevel, logfile):
  94. """Extend the tasks keyword arguments with standard task arguments.
  95. Currently these are ``logfile``, ``loglevel``, ``task_id``,
  96. ``task_name`` and ``task_retries``.
  97. See :meth:`celery.task.base.Task.run` for more information.
  98. """
  99. kwargs = dict(self.kwargs)
  100. default_kwargs = {"logfile": logfile,
  101. "loglevel": loglevel,
  102. "task_id": self.task_id,
  103. "task_name": self.task_name,
  104. "task_retries": self.retries}
  105. fun = getattr(self.task_func, "run", self.task_func)
  106. supported_keys = fun_takes_kwargs(fun, default_kwargs)
  107. extend_with = dict((key, val) for key, val in default_kwargs.items()
  108. if key in supported_keys)
  109. kwargs.update(extend_with)
  110. return kwargs
  111. def _executeable(self, loglevel=None, logfile=None):
  112. """Get the :class:`celery.execute.ExecuteWrapper` for this task."""
  113. task_func_kwargs = self.extend_with_default_kwargs(loglevel, logfile)
  114. return ExecuteWrapper(self.task_func, self.task_id, self.task_name,
  115. self.args, task_func_kwargs)
  116. def execute(self, loglevel=None, logfile=None):
  117. """Execute the task in a :class:`celery.execute.ExecuteWrapper`.
  118. :keyword loglevel: The loglevel used by the task.
  119. :keyword logfile: The logfile used by the task.
  120. """
  121. # acknowledge task as being processed.
  122. self.on_ack()
  123. return self._executeable(loglevel, logfile)()
  124. def execute_using_pool(self, pool, loglevel=None, logfile=None):
  125. """Like :meth:`execute`, but using the :mod:`multiprocessing` pool.
  126. :param pool: A :class:`multiprocessing.Pool` instance.
  127. :keyword loglevel: The loglevel used by the task.
  128. :keyword logfile: The logfile used by the task.
  129. :returns :class:`multiprocessing.AsyncResult` instance.
  130. """
  131. wrapper = self._executeable(loglevel, logfile)
  132. return pool.apply_async(wrapper,
  133. callbacks=[self.on_success], errbacks=[self.on_failure],
  134. on_ack=self.on_ack)
  135. def on_success(self, ret_value):
  136. """The handler used if the task was successfully processed (
  137. without raising an exception)."""
  138. msg = self.success_msg.strip() % {
  139. "id": self.task_id,
  140. "name": self.task_name,
  141. "return_value": ret_value}
  142. self.logger.info(msg)
  143. def on_failure(self, exc_info):
  144. """The handler used if the task raised an exception."""
  145. from celery.conf import SEND_CELERY_TASK_ERROR_EMAILS
  146. context = {
  147. "hostname": socket.gethostname(),
  148. "id": self.task_id,
  149. "name": self.task_name,
  150. "exc": exc_info.exception,
  151. "traceback": exc_info.traceback,
  152. "args": self.args,
  153. "kwargs": self.kwargs,
  154. }
  155. self.logger.error(self.fail_msg.strip() % context)
  156. task_obj = tasks.get(self.task_name, object)
  157. send_error_email = SEND_CELERY_TASK_ERROR_EMAILS and not \
  158. getattr(task_obj, "disable_error_emails", False)
  159. if send_error_email:
  160. subject = self.fail_email_subject.strip() % context
  161. body = self.fail_email_body.strip() % context
  162. mail_admins(subject, body, fail_silently=True)