# -*- coding: utf-8 -*-
"""
celery.app.task
~~~~~~~~~~~~~~~

Task Implementation: Task request context, and the base task class.
"""
from __future__ import absolute_import
from __future__ import with_statement

import os
import sys

from kombu.utils import cached_property

from celery import current_app
from celery import states
from celery.__compat__ import class_property
from celery.state import get_current_worker_task, _task_stack
from celery.datastructures import ExceptionInfo
from celery.exceptions import MaxRetriesExceededError, RetryTaskError
from celery.result import EagerResult
from celery.utils import fun_takes_kwargs, uuid, maybe_reraise
from celery.utils.functional import mattrgetter, maybe_list
from celery.utils.imports import instantiate
from celery.utils.mail import ErrorMail

from .annotations import resolve_all as resolve_all_annotations
from .registry import _unpickle_task

#: extracts attributes related to publishing a message from an object.
extract_exec_options = mattrgetter(
    'queue', 'routing_key', 'exchange',
    'immediate', 'mandatory', 'priority', 'expires',
    'serializer', 'delivery_mode', 'compression',
)

#: Billiard sets this when execv is enabled.
#: We use it to find out the name of the original ``__main__``
#: module, so that we can properly rewrite the name of the
#: task to be that of ``App.main``.
MP_MAIN_FILE = os.environ.get('MP_MAIN_FILE') or None


class Context(object):
    # Default context
    logfile = None
    loglevel = None
    hostname = None
    id = None
    args = None
    kwargs = None
    retries = 0
    is_eager = False
    delivery_info = None
    taskset = None   # compat alias to group
    group = None
    chord = None
    called_directly = True
    callbacks = None
    errbacks = None
    _children = None   # see property

    def __init__(self, *args, **kwargs):
        self.update(*args, **kwargs)

    def update(self, *args, **kwargs):
        self.__dict__.update(*args, **kwargs)

    def clear(self):
        self.__dict__.clear()

    def get(self, key, default=None):
        try:
            return getattr(self, key)
        except AttributeError:
            return default

    def __repr__(self):
        return '<Context: %r>' % (vars(self, ))

    @property
    def children(self):
        # children must be an empty list for every thread
        if self._children is None:
            self._children = []
        return self._children


class TaskType(type):
    """Metaclass for tasks.

    Automatically registers the task in the task registry, except
    if the `abstract` attribute is set.

    If no `name` attribute is provided, the name is automatically
    generated from the name of the module it was defined in and the
    class name.
    """

    def __new__(cls, name, bases, attrs):
        new = super(TaskType, cls).__new__
        task_module = attrs.get('__module__') or '__main__'

        # - Abstract class: abstract attribute should not be inherited.
        if attrs.pop('abstract', None) or not attrs.get('autoregister', True):
            return new(cls, name, bases, attrs)

        # The 'app' attribute is now a property, with the real app located
        # in the '_app' attribute.  Previously this was a regular attribute,
        # so we should support classes defining it.
        _app1, _app2 = attrs.pop('_app', None), attrs.pop('app', None)
        app = attrs['_app'] = _app1 or _app2 or current_app

        # - Automatically generate missing/empty name.
        autoname = False
        if not attrs.get('name'):
            try:
                module_name = sys.modules[task_module].__name__
            except KeyError:  # pragma: no cover
                # Fix for manage.py shell_plus (Issue #366).
                module_name = task_module
            attrs['name'] = '.'.join(filter(None, [module_name, name]))
            autoname = True

        # - Create and register class.
        # Because of the way import happens (recursively)
        # this may or may not be the first time the task tries to register
        # with the framework.  There should only be one class for each task
        # name, so we always return the registered version.
        tasks = app._tasks

        # - If the task module is used as the __main__ script
        # - we need to rewrite the module part of the task name
        # - to match App.main.
        if MP_MAIN_FILE and sys.modules[task_module].__file__ == MP_MAIN_FILE:
            # - see comment about :envvar:`MP_MAIN_FILE` above.
            task_module = '__main__'
        if autoname and task_module == '__main__' and app.main:
            attrs['name'] = '.'.join([app.main, name])

        task_name = attrs['name']
        if task_name not in tasks:
            tasks.register(new(cls, name, bases, attrs))
        instance = tasks[task_name]
        instance.bind(app)
        return instance.__class__

    def __repr__(cls):
        if cls._app:
            return '<class %s of %s>' % (cls.__name__, cls._app, )
        return '<unbound %s>' % (cls.__name__, )


class Task(object):
    """Task base class.

    When called tasks apply the :meth:`run` method.  This method must
    be defined by all tasks (that is unless the :meth:`__call__` method
    is overridden).
    """
    __metaclass__ = TaskType
    __trace__ = None

    ErrorMail = ErrorMail
    MaxRetriesExceededError = MaxRetriesExceededError

    #: Execution strategy used, or the qualified name of one.
    Strategy = 'celery.worker.strategy:default'

    #: This is the instance bound to if the task is a method of a class.
    __self__ = None

    #: The application instance associated with this task class.
    _app = None

    #: Name of the task.
    name = None

    #: If :const:`True` the task is an abstract base class.
    abstract = True

    #: If disabled the worker will not forward magic keyword arguments.
    #: Deprecated and scheduled for removal in v3.0.
    accept_magic_kwargs = None

    #: Maximum number of retries before giving up.  If set to :const:`None`,
    #: it will **never** stop retrying.
    max_retries = 3

    #: Default time in seconds before a retry of the task should be
    #: executed.  3 minutes by default.
    default_retry_delay = 3 * 60

    #: Rate limit for this task type.  Examples: :const:`None` (no rate
    #: limit), `'100/s'` (hundred tasks a second), `'100/m'` (hundred tasks
    #: a minute), `'100/h'` (hundred tasks an hour)
    rate_limit = None

    #: If enabled the worker will not store task state and return values
    #: for this task.  Defaults to the :setting:`CELERY_IGNORE_RESULT`
    #: setting.
    ignore_result = False

    #: When enabled errors will be stored even if the task is otherwise
    #: configured to ignore results.
    store_errors_even_if_ignored = False

    #: If enabled an email will be sent to :setting:`ADMINS` whenever a task
    #: of this type fails.
    send_error_emails = False

    #: The name of a serializer that is registered with
    #: :mod:`kombu.serialization.registry`.  Default is `'pickle'`.
    serializer = None

    #: Hard time limit.
    #: Defaults to the :setting:`CELERY_TASK_TIME_LIMIT` setting.
    time_limit = None

    #: Soft time limit.
    #: Defaults to the :setting:`CELERY_TASK_SOFT_TIME_LIMIT` setting.
    soft_time_limit = None

    #: The result store backend used for this task.
    backend = None

    #: If disabled this task won't be registered automatically.
    autoregister = True

    #: If enabled the task will report its status as 'started' when the task
    #: is executed by a worker.  Disabled by default as the normal behaviour
    #: is to not report that level of granularity.  Tasks are either pending,
    #: finished, or waiting to be retried.
    #:
    #: Having a 'started' status can be useful for when there are long
    #: running tasks and there is a need to report which task is currently
    #: running.
    #:
    #: The application default can be overridden using the
    #: :setting:`CELERY_TRACK_STARTED` setting.
    track_started = False

    #: When enabled messages for this task will be acknowledged **after**
    #: the task has been executed, and not *just before* which is the
    #: default behavior.
    #:
    #: Please note that this means the task may be executed twice if the
    #: worker crashes mid execution (which may be acceptable for some
    #: applications).
    #:
    #: The application default can be overridden with the
    #: :setting:`CELERY_ACKS_LATE` setting.
    acks_late = None

    #: Default task expiry time.
    expires = None

    __bound__ = False

    from_config = (
        ('send_error_emails', 'CELERY_SEND_TASK_ERROR_EMAILS'),
        ('serializer', 'CELERY_TASK_SERIALIZER'),
        ('rate_limit', 'CELERY_DEFAULT_RATE_LIMIT'),
        ('track_started', 'CELERY_TRACK_STARTED'),
        ('acks_late', 'CELERY_ACKS_LATE'),
        ('ignore_result', 'CELERY_IGNORE_RESULT'),
        ('store_errors_even_if_ignored',
            'CELERY_STORE_ERRORS_EVEN_IF_IGNORED'),
    )

    # - Tasks are lazily bound, so that configuration is not set
    # - until the task is actually used
    @classmethod
    def bind(self, app):
        was_bound, self.__bound__ = self.__bound__, True
        self._app = app
        conf = app.conf

        for attr_name, config_name in self.from_config:
            if getattr(self, attr_name, None) is None:
                setattr(self, attr_name, conf[config_name])
        if self.accept_magic_kwargs is None:
            self.accept_magic_kwargs = app.accept_magic_kwargs
        if self.backend is None:
            self.backend = app.backend

        # decorate with annotations from config.
        if not was_bound:
            self.annotate()

            from celery.utils.threads import LocalStack
            self.request_stack = LocalStack()
            self.request_stack.push(Context())

        # PeriodicTask uses this to add itself to the PeriodicTask schedule.
        self.on_bound(app)

        return app

    @classmethod
    def on_bound(self, app):
        """This method can be defined to do additional actions when the
        task class is bound to an app."""
        pass

    @classmethod
    def _get_app(self):
        if not self.__bound__ or self._app is None:
            # The app property's __set__ method is not called
            # if Task.app is set (on the class), so must bind on use.
            self.bind(current_app)
        return self._app
    app = class_property(_get_app, bind)

    @classmethod
    def annotate(self):
        for d in resolve_all_annotations(self.app.annotations, self):
            for key, value in d.iteritems():
                if key.startswith('@'):
                    self.add_around(key[1:], value)
                else:
                    setattr(self, key, value)

    @classmethod
    def add_around(self, attr, around):
        orig = getattr(self, attr)
        if getattr(orig, '__wrapped__', None):
            orig = orig.__wrapped__
        meth = around(orig)
        meth.__wrapped__ = orig
        setattr(self, attr, meth)
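
    # Annotation sketch (illustrative, not part of this module): the
    # ``annotate`` hook above applies the ``CELERY_ANNOTATIONS`` setting,
    # which overrides task attributes by task name; keys starting with '@'
    # wrap the named method instead of replacing the attribute.
    #
    #     CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}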

    def __call__(self, *args, **kwargs):
        _task_stack.push(self)
        self.push_request()
        try:
            return self.run(*args, **kwargs)
        finally:
            self.pop_request()
            _task_stack.pop()

    # - tasks are pickled into the name of the task only, and the receiver
    # - simply grabs it from the local registry.
    def __reduce__(self):
        return (_unpickle_task, (self.name, ), None)

    def run(self, *args, **kwargs):
        """The body of the task executed by workers."""
        raise NotImplementedError('Tasks must define the run method.')

    def start_strategy(self, app, consumer):
        return instantiate(self.Strategy, self, app, consumer)

    def delay(self, *args, **kwargs):
        """Star argument version of :meth:`apply_async`.

        Does not support the extra options enabled by :meth:`apply_async`.

        :param \*args: positional arguments passed on to the task.
        :param \*\*kwargs: keyword arguments passed on to the task.

        :returns :class:`celery.result.AsyncResult`:
        """
        return self.apply_async(args, kwargs)
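
    # Usage sketch (illustrative only; ``add`` is a hypothetical task):
    #
    #     >>> add.delay(2, 2)          # equivalent to add.apply_async((2, 2))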

    def apply_async(self, args=None, kwargs=None,
            task_id=None, producer=None, connection=None, router=None,
            link=None, link_error=None, publisher=None, add_to_parent=True,
            **options):
        """Apply tasks asynchronously by sending a message.

        :keyword args: The positional arguments to pass on to the
                       task (a :class:`list` or :class:`tuple`).

        :keyword kwargs: The keyword arguments to pass on to the
                         task (a :class:`dict`)

        :keyword countdown: Number of seconds into the future that the
                            task should execute.  Defaults to immediate
                            execution (do not confuse with the
                            `immediate` flag, as they are unrelated).

        :keyword eta: A :class:`~datetime.datetime` object describing
                      the absolute time and date of when the task should
                      be executed.  May not be specified if `countdown`
                      is also supplied.  (Do not confuse this with the
                      `immediate` flag, as they are unrelated).

        :keyword expires: Either a :class:`int`, describing the number of
                          seconds, or a :class:`~datetime.datetime` object
                          that describes the absolute time and date of when
                          the task should expire.  The task will not be
                          executed after the expiration time.

        :keyword connection: Re-use existing broker connection instead
                             of establishing a new one.

        :keyword retry: If enabled sending of the task message will be
                        retried in the event of connection loss or failure.
                        Default is taken from the
                        :setting:`CELERY_TASK_PUBLISH_RETRY` setting.
                        Note you need to handle the producer/connection
                        manually for this to work.

        :keyword retry_policy: Override the retry policy used.  See the
                               :setting:`CELERY_TASK_PUBLISH_RETRY` setting.

        :keyword routing_key: The routing key used to route the task to a
                              worker server.  Defaults to the
                              :attr:`routing_key` attribute.

        :keyword exchange: The named exchange to send the task to.
                           Defaults to the :attr:`exchange` attribute.

        :keyword exchange_type: The exchange type to initialize the exchange
                                if not already declared.  Defaults to the
                                :attr:`exchange_type` attribute.

        :keyword immediate: Request immediate delivery.  Will raise an
                            exception if the task cannot be routed to a worker
                            immediately.  (Do not confuse this parameter with
                            the `countdown` and `eta` settings, as they are
                            unrelated).  Defaults to the :attr:`immediate`
                            attribute.

        :keyword mandatory: Mandatory routing.  Raises an exception if
                            there's no running workers able to take on this
                            task.  Defaults to the :attr:`mandatory`
                            attribute.

        :keyword priority: The task priority, a number between 0 and 9.
                           Defaults to the :attr:`priority` attribute.

        :keyword serializer: A string identifying the default
                             serialization method to use.  Can be `pickle`,
                             `json`, `yaml`, `msgpack` or any custom
                             serialization method that has been registered
                             with :mod:`kombu.serialization.registry`.
                             Defaults to the :attr:`serializer` attribute.

        :keyword compression: A string identifying the compression method
                              to use.  Can be one of ``zlib``, ``bzip2``,
                              or any custom compression methods registered
                              with :func:`kombu.compression.register`.
                              Defaults to the
                              :setting:`CELERY_MESSAGE_COMPRESSION` setting.

        :keyword link: A single, or a list of subtasks to apply if the
                       task exits successfully.

        :keyword link_error: A single, or a list of subtasks to apply
                             if an error occurs while executing the task.

        :keyword producer: :class:`~@amqp.TaskProducer` instance to use.

        :keyword add_to_parent: If set to True (default) and the task
                                is applied while executing another task, then
                                the result will be appended to the parent
                                task's ``request.children`` attribute.

        :keyword publisher: Deprecated alias to ``producer``.

        .. note::
            If the :setting:`CELERY_ALWAYS_EAGER` setting is set, it will
            be replaced by a local :func:`apply` call instead.

        """
        producer = producer or publisher
        app = self._get_app()
        router = router or self.app.amqp.router
        conf = app.conf

        # add 'self' if this is a bound method.
        if self.__self__ is not None:
            args = (self.__self__, ) + tuple(args)

        if conf.CELERY_ALWAYS_EAGER:
            return self.apply(args, kwargs, task_id=task_id, **options)
        options = dict(extract_exec_options(self), **options)
        options = router.route(options, self.name, args, kwargs)

        if connection:
            producer = app.amqp.TaskProducer(connection)
        with app.default_producer(producer) as P:
            evd = None
            if conf.CELERY_SEND_TASK_SENT_EVENT:
                evd = app.events.Dispatcher(channel=P.channel,
                                            buffer_while_offline=False)

            task_id = P.delay_task(self.name, args, kwargs,
                                   task_id=task_id,
                                   event_dispatcher=evd,
                                   callbacks=maybe_list(link),
                                   errbacks=maybe_list(link_error),
                                   **options)
        result = self.AsyncResult(task_id)
        if add_to_parent:
            parent = get_current_worker_task()
            if parent:
                parent.request.children.append(result)
        return result
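
    # Usage sketch (illustrative only; ``add`` and ``log_result`` are
    # hypothetical tasks, not defined in this module):
    #
    #     >>> add.apply_async(args=(2, 2), countdown=10)
    #     >>> add.apply_async((2, 2), link=log_result.s())  # callback on success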

    def retry(self, args=None, kwargs=None, exc=None, throw=True,
              eta=None, countdown=None, max_retries=None, **options):
        """Retry the task.

        :param args: Positional arguments to retry with.
        :param kwargs: Keyword arguments to retry with.
        :keyword exc: Optional exception to raise instead of
                      :exc:`~celery.exceptions.MaxRetriesExceededError`
                      when the max retry limit has been exceeded.
        :keyword countdown: Time in seconds to delay the retry for.
        :keyword eta: Explicit time and date to run the retry at
                      (must be a :class:`~datetime.datetime` instance).
        :keyword max_retries: If set, overrides the default retry limit.
        :keyword \*\*options: Any extra options to pass on to
                              :meth:`apply_async`.
        :keyword throw: If this is :const:`False`, do not raise the
                        :exc:`~celery.exceptions.RetryTaskError` exception,
                        that tells the worker to mark the task as being
                        retried.  Note that this means the task will be
                        marked as failed if the task raises an exception,
                        or successful if it returns.

        :raises celery.exceptions.RetryTaskError: To tell the worker that
            the task has been re-sent for retry.  This always happens,
            unless the `throw` keyword argument has been explicitly set
            to :const:`False`, and is considered normal operation.

        **Example**

        .. code-block:: python

            >>> @task()
            >>> def tweet(auth, message):
            ...     twitter = Twitter(oauth=auth)
            ...     try:
            ...         twitter.post_status_update(message)
            ...     except twitter.FailWhale, exc:
            ...         # Retry in 5 minutes.
            ...         raise tweet.retry(countdown=60 * 5, exc=exc)

        Although the task will never return above, as `retry` raises an
        exception to notify the worker, we use `raise` in front of the retry
        to make it clear that the rest of the block will not be executed.

        """
        request = self.request
        max_retries = self.max_retries if max_retries is None else max_retries
        args = request.args if args is None else args
        kwargs = request.kwargs if kwargs is None else kwargs
        delivery_info = request.delivery_info

        # Not in worker or emulated by (apply/always_eager),
        # so just raise the original exception.
        if request.called_directly:
            maybe_reraise()
            raise exc or RetryTaskError('Task can be retried', None)

        if delivery_info:
            options.setdefault('exchange', delivery_info.get('exchange'))
            options.setdefault('routing_key', delivery_info.get('routing_key'))

        if not eta and countdown is None:
            countdown = self.default_retry_delay

        options.update({'retries': request.retries + 1,
                        'task_id': request.id,
                        'countdown': countdown,
                        'eta': eta})

        if max_retries is not None and options['retries'] > max_retries:
            if exc:
                maybe_reraise()
            raise self.MaxRetriesExceededError(
                    """Can't retry %s[%s] args:%s kwargs:%s""" % (
                        self.name, options['task_id'], args, kwargs))

        # If task was executed eagerly using apply(),
        # then the retry must also be executed eagerly.
        if request.is_eager:
            self.apply(args=args, kwargs=kwargs, **options).get()
        else:
            self.apply_async(args=args, kwargs=kwargs, **options)
        ret = RetryTaskError(eta and 'Retry at %s' % eta
                                 or 'Retry in %s secs.' % countdown, exc)
        if throw:
            raise ret
        return ret

    def apply(self, args=None, kwargs=None, **options):
        """Execute this task locally, by blocking until the task returns.

        :param args: positional arguments passed on to the task.
        :param kwargs: keyword arguments passed on to the task.
        :keyword throw: Re-raise task exceptions.  Defaults to
                        the :setting:`CELERY_EAGER_PROPAGATES_EXCEPTIONS`
                        setting.

        :rtype :class:`celery.result.EagerResult`:

        """
        # trace imports Task, so need to import inline.
        from celery.task.trace import eager_trace_task

        app = self._get_app()
        args = args or []
        kwargs = kwargs or {}
        task_id = options.get('task_id') or uuid()
        retries = options.get('retries', 0)
        throw = app.either('CELERY_EAGER_PROPAGATES_EXCEPTIONS',
                           options.pop('throw', None))

        # Make sure we get the task instance, not class.
        task = app._tasks[self.name]

        request = {'id': task_id,
                   'retries': retries,
                   'is_eager': True,
                   'logfile': options.get('logfile'),
                   'loglevel': options.get('loglevel', 0),
                   'delivery_info': {'is_eager': True}}
        if self.accept_magic_kwargs:
            default_kwargs = {'task_name': task.name,
                              'task_id': task_id,
                              'task_retries': retries,
                              'task_is_eager': True,
                              'logfile': options.get('logfile'),
                              'loglevel': options.get('loglevel', 0),
                              'delivery_info': {'is_eager': True}}
            supported_keys = fun_takes_kwargs(task.run, default_kwargs)
            extend_with = dict((key, val)
                               for key, val in default_kwargs.items()
                               if key in supported_keys)
            kwargs.update(extend_with)

        tb = None
        retval, info = eager_trace_task(task, task_id, args, kwargs,
                                        request=request, propagate=throw)
        if isinstance(retval, ExceptionInfo):
            retval, tb = retval.exception, retval.traceback
        state = states.SUCCESS if info is None else info.state
        return EagerResult(task_id, retval, state, traceback=tb)
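
    # Usage sketch (illustrative only; ``add`` is a hypothetical task):
    #
    #     >>> result = add.apply(args=(2, 2))   # runs in the current process
    #     >>> result.get()                      # -> 4, from an EagerResult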

    def AsyncResult(self, task_id):
        """Get AsyncResult instance for this kind of task.

        :param task_id: Task id to get result for.

        """
        return self._get_app().AsyncResult(task_id, backend=self.backend,
                                           task_name=self.name)

    def subtask(self, *args, **kwargs):
        """Returns :class:`~celery.subtask` object for
        this task, wrapping arguments and execution options
        for a single task invocation."""
        from celery.canvas import subtask
        return subtask(self, *args, **kwargs)

    def s(self, *args, **kwargs):
        """``.s(*a, **k) -> .subtask(a, k)``"""
        return self.subtask(args, kwargs)

    def si(self, *args, **kwargs):
        """``.si(*a, **k) -> .subtask(a, k, immutable=True)``"""
        return self.subtask(args, kwargs, immutable=True)
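
    # Signature sketch (illustrative only; ``add`` is a hypothetical task):
    #
    #     >>> add.s(2, 2)      # same as add.subtask((2, 2))
    #     >>> add.si(2, 2)     # immutable: ignores results passed on by a
    #     ...                  # preceding task when used inside a chain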

    def chunks(self, it, n):
        """Creates a :class:`~celery.canvas.chunks` task for this task."""
        from celery import chunks
        return chunks(self.s(), it, n)

    def map(self, it):
        """Creates a :class:`~celery.canvas.xmap` task from ``it``."""
        from celery import xmap
        return xmap(self.s(), it)

    def starmap(self, it):
        """Creates a :class:`~celery.canvas.xstarmap` task from ``it``."""
        from celery import xstarmap
        return xstarmap(self.s(), it)
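
    # Canvas sketch (illustrative only; ``add`` is a hypothetical task):
    #
    #     >>> add.starmap([(1, 1), (2, 2)])     # applies add(*item) per item
    #     >>> add.chunks(zip(range(10), range(10)), 5)   # chunks of 5 items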

    def update_state(self, task_id=None, state=None, meta=None):
        """Update task state.

        :keyword task_id: Id of the task to update, defaults to the
                          id of the current task
        :keyword state: New state (:class:`str`).
        :keyword meta: State metadata (:class:`dict`).

        """
        if task_id is None:
            task_id = self.request.id
        self.backend.store_result(task_id, meta, state)
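
    # Usage sketch (illustrative only; ``process`` is a hypothetical task and
    # 'PROGRESS' an application-defined state name):
    #
    #     >>> process.update_state(state='PROGRESS',
    #     ...                      meta={'current': 50, 'total': 100})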

    def on_success(self, retval, task_id, args, kwargs):
        """Success handler.

        Run by the worker if the task executes successfully.

        :param retval: The return value of the task.
        :param task_id: Unique id of the executed task.
        :param args: Original arguments for the executed task.
        :param kwargs: Original keyword arguments for the executed task.

        The return value of this handler is ignored.

        """
        pass

    def on_retry(self, exc, task_id, args, kwargs, einfo):
        """Retry handler.

        This is run by the worker when the task is to be retried.

        :param exc: The exception sent to :meth:`retry`.
        :param task_id: Unique id of the retried task.
        :param args: Original arguments for the retried task.
        :param kwargs: Original keyword arguments for the retried task.

        :keyword einfo: :class:`~celery.datastructures.ExceptionInfo`
                        instance, containing the traceback.

        The return value of this handler is ignored.

        """
        pass

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        """Error handler.

        This is run by the worker when the task fails.

        :param exc: The exception raised by the task.
        :param task_id: Unique id of the failed task.
        :param args: Original arguments for the task that failed.
        :param kwargs: Original keyword arguments for the task
                       that failed.

        :keyword einfo: :class:`~celery.datastructures.ExceptionInfo`
                        instance, containing the traceback.

        The return value of this handler is ignored.

        """
        pass

    def after_return(self, status, retval, task_id, args, kwargs, einfo):
        """Handler called after the task returns.

        :param status: Current task state.
        :param retval: Task return value/exception.
        :param task_id: Unique id of the task.
        :param args: Original arguments for the task that failed.
        :param kwargs: Original keyword arguments for the task
                       that failed.

        :keyword einfo: :class:`~celery.datastructures.ExceptionInfo`
                        instance, containing the traceback (if any).

        The return value of this handler is ignored.

        """
        pass
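
    # Handler-override sketch (illustrative only): a base class that logs
    # failures; the worker calls on_failure after the task body raises.
    #
    #     >>> class DebugTask(Task):
    #     ...     abstract = True
    #     ...
    #     ...     def on_failure(self, exc, task_id, args, kwargs, einfo):
    #     ...         print('Task %s raised: %r' % (task_id, exc))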

    def send_error_email(self, context, exc, **kwargs):
        if self.send_error_emails and \
                not getattr(self, 'disable_error_emails', None):
            self.ErrorMail(self, **kwargs).send(context, exc)

    def execute(self, request, pool, loglevel, logfile, **kwargs):
        """The method the worker calls to execute the task.

        :param request: A :class:`~celery.worker.job.Request`.
        :param pool: A task pool.
        :param loglevel: Current loglevel.
        :param logfile: Name of the currently used logfile.

        :keyword consumer: The :class:`~celery.worker.consumer.Consumer`.

        """
        request.execute_using_pool(pool, loglevel, logfile)

    def push_request(self, *args, **kwargs):
        self.request_stack.push(Context(*args, **kwargs))

    def pop_request(self):
        self.request_stack.pop()

    def __repr__(self):
        """`repr(task)`"""
        return '<@task: %s>' % (self.name, )

    @property
    def request(self):
        """Current request object."""
        return self.request_stack.top

    @property
    def __name__(self):
        return self.__class__.__name__


BaseTask = Task  # compat alias