builtins.py

# -*- coding: utf-8 -*-
"""
    celery.app.builtins
    ~~~~~~~~~~~~~~~~~~~

    Built-in tasks that are always available in all
    app instances.  E.g. chord, group and xmap.

"""
from __future__ import absolute_import

from celery._state import get_current_worker_task, connect_on_app_finalize
from celery.utils.log import get_logger

__all__ = []

logger = get_logger(__name__)


@connect_on_app_finalize
def add_backend_cleanup_task(app):
    """The backend cleanup task can be used to clean up the default result
    backend.

    If the configured backend requires periodic cleanup this task is also
    automatically configured to run every day at 4am (requires
    :program:`celery beat` to be running).

    """
    @app.task(name='celery.backend_cleanup', shared=False, lazy=False)
    def backend_cleanup():
        app.backend.cleanup()
    return backend_cleanup
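
# Usage sketch (illustrative, not part of the upstream module): because the
# cleanup task is registered by name it can be triggered without importing
# anything from this file, and the crontab below simply mirrors the
# "every day at 4am" schedule mentioned in the docstring.  ``app`` is assumed
# to be a configured Celery application with a result backend.
#
#     >>> from celery.schedules import crontab
#     >>> app.send_task('celery.backend_cleanup')   # run the cleanup once, now
#     >>> app.conf.CELERYBEAT_SCHEDULE = {
#     ...     'cleanup-results': {
#     ...         'task': 'celery.backend_cleanup',
#     ...         'schedule': crontab(hour=4, minute=0),
#     ...     },
#     ... }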


@connect_on_app_finalize
def add_accumulate_task(app):
    """This task is used by Task.replace when replacing a task with
    a group, to "collect" results."""
    @app.task(bind=True, name='celery.accumulate', shared=False, lazy=False)
    def accumulate(self, *args, **kwargs):
        index = kwargs.get('index')
        return args[index] if index is not None else args
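
# Behaviour sketch (illustrative, not part of the upstream module): the
# accumulate task hands back the positional results it receives, or a single
# one when ``index`` is given, which is how ``Task.replace`` can swap a task
# for a group and still pass the collected results downstream.  ``app`` is
# assumed to be a finalized Celery application.
#
#     >>> acc = app.tasks['celery.accumulate']
#     >>> acc(1, 2, 3)
#     (1, 2, 3)
#     >>> acc(1, 2, 3, index=1)
#     2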


@connect_on_app_finalize
def add_unlock_chord_task(app):
    """This task is used by result backends without native chord support.

    It joins chords by creating a task chain polling the header for completion.

    """
    from celery.canvas import maybe_signature
    from celery.exceptions import ChordError
    from celery.result import allow_join_result, result_from_tuple

    default_propagate = app.conf.CELERY_CHORD_PROPAGATES

    @app.task(name='celery.chord_unlock', max_retries=None, shared=False,
              default_retry_delay=1, ignore_result=True, lazy=False, bind=True)
    def unlock_chord(self, group_id, callback, interval=None, propagate=None,
                     max_retries=None, result=None,
                     Result=app.AsyncResult, GroupResult=app.GroupResult,
                     result_from_tuple=result_from_tuple):
        # if propagate is disabled exceptions raised by chord tasks
        # will be sent as part of the result list to the chord callback.
        # Since 3.1 propagate will be enabled by default, and instead
        # the chord callback changes state to FAILURE with the
        # exception set to ChordError.
        propagate = default_propagate if propagate is None else propagate
        if interval is None:
            interval = self.default_retry_delay

        # check if the task group is ready, and if so apply the callback.
        callback = maybe_signature(callback, app)
        deps = GroupResult(
            group_id,
            [result_from_tuple(r, app=app) for r in result],
            app=app,
        )
        j = deps.join_native if deps.supports_native_join else deps.join

        try:
            ready = deps.ready()
        except Exception as exc:
            raise self.retry(
                exc=exc, countdown=interval, max_retries=max_retries,
            )
        else:
            if not ready:
                raise self.retry(countdown=interval, max_retries=max_retries)

        callback = maybe_signature(callback, app=app)
        try:
            with allow_join_result():
                ret = j(timeout=3.0, propagate=propagate)
        except Exception as exc:
            try:
                culprit = next(deps._failed_join_report())
                reason = 'Dependency {0.id} raised {1!r}'.format(
                    culprit, exc,
                )
            except StopIteration:
                reason = repr(exc)
            logger.error('Chord %r raised: %r', group_id, exc, exc_info=1)
            app.backend.chord_error_from_stack(callback,
                                               ChordError(reason))
        else:
            try:
                callback.delay(ret)
            except Exception as exc:
                logger.error('Chord %r raised: %r', group_id, exc, exc_info=1)
                app.backend.chord_error_from_stack(
                    callback,
                    exc=ChordError('Callback error: {0!r}'.format(exc)),
                )
    return unlock_chord
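
# Usage sketch (illustrative, not part of the upstream module): with a result
# backend that has no native chord support, a chord such as the one below is
# what ends up scheduling ``celery.chord_unlock``; the unlock task retries at
# ``interval`` seconds until the header group is ready and then applies the
# body.  ``add`` and ``tsum`` are assumed example tasks, not defined here.
#
#     >>> from celery import chord
#     >>> res = chord(add.s(i, i) for i in range(10))(tsum.s())
#     >>> res.get()
#     90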


@connect_on_app_finalize
def add_map_task(app):
    from celery.canvas import signature

    @app.task(name='celery.map', shared=False, lazy=False)
    def xmap(task, it):
        task = signature(task, app=app).type
        return [task(item) for item in it]
    return xmap


@connect_on_app_finalize
def add_starmap_task(app):
    from celery.canvas import signature

    @app.task(name='celery.starmap', shared=False, lazy=False)
    def xstarmap(task, it):
        task = signature(task, app=app).type
        return [task(*item) for item in it]
    return xstarmap


@connect_on_app_finalize
def add_chunk_task(app):
    from celery.canvas import chunks as _chunks

    @app.task(name='celery.chunks', shared=False, lazy=False)
    def chunks(task, it, n):
        return _chunks.apply_chunks(task, it, n)
    return chunks
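
# Usage sketch (illustrative, not part of the upstream module): ``celery.map``,
# ``celery.starmap`` and ``celery.chunks`` are the worker-side halves of the
# ``task.map()`` / ``task.starmap()`` / ``task.chunks()`` shortcuts, each
# running the wrapped task sequentially inside a single worker call.  ``xsum``
# (one list argument) and ``add`` (two arguments) are assumed example tasks.
#
#     >>> xsum.map([range(10), range(100)]).delay()            # celery.map
#     >>> add.starmap(zip(range(10), range(10))).delay()       # celery.starmap
#     >>> add.chunks(zip(range(100), range(100)), 10)()        # celery.chunks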


@connect_on_app_finalize
def add_group_task(app):
    """No longer used, but here for backwards compatibility."""
    from celery.canvas import maybe_signature
    from celery.result import result_from_tuple

    @app.task(name='celery.group', bind=True, shared=False, lazy=False)
    def group(self, tasks, result, group_id, partial_args, add_to_parent=True):
        app = self.app
        result = result_from_tuple(result, app)
        # any partial args are added to all tasks in the group
        taskit = (maybe_signature(task, app=app).clone(partial_args)
                  for i, task in enumerate(tasks))
        with app.producer_or_acquire() as producer:
            [stask.apply_async(group_id=group_id, producer=producer,
                               add_to_parent=False) for stask in taskit]
        parent = get_current_worker_task()
        if add_to_parent and parent:
            parent.add_trail(result)
        return result
    return group


@connect_on_app_finalize
def add_chain_task(app):
    """No longer used, but here for backwards compatibility."""
    @app.task(name='celery.chain', shared=False, lazy=False)
    def chain(*args, **kwargs):
        raise NotImplementedError('chain is not a real task')
    return chain


@connect_on_app_finalize
def add_chord_task(app):
    """No longer used, but here for backwards compatibility."""
    from celery import group, chord as _chord
    from celery.canvas import maybe_signature

    @app.task(name='celery.chord', bind=True, ignore_result=False,
              shared=False, lazy=False)
    def chord(self, header, body, partial_args=(), interval=None,
              countdown=1, max_retries=None, propagate=None,
              eager=False, **kwargs):
        app = self.app
        # - convert back to group if serialized
        tasks = header.tasks if isinstance(header, group) else header
        header = group([
            maybe_signature(s, app=app) for s in tasks
        ], app=self.app)
        body = maybe_signature(body, app=app)
        ch = _chord(header, body)
        return ch.run(header, body, partial_args, app, interval,
                      countdown, max_retries, propagate, **kwargs)
    return chord
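
# Compatibility note (illustrative, not part of the upstream module): the
# ``celery.group`` / ``celery.chain`` / ``celery.chord`` tasks above only keep
# messages produced by older clients working; new code composes workflows with
# the canvas primitives directly.  ``add`` is an assumed example task.
#
#     >>> from celery import chain, group
#     >>> group(add.s(i, i) for i in range(4))()
#     >>> chain(add.s(2, 2), add.s(4), add.s(8))()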