# tasks.py
  1. # -*- coding: utf-8 -*-
  2. from __future__ import absolute_import, unicode_literals
  3. from time import sleep
  4. from celery import chain, group, shared_task
  5. from celery.exceptions import SoftTimeLimitExceeded
  6. from celery.utils.log import get_task_logger
  7. from .conftest import get_redis_connection
  8. logger = get_task_logger(__name__)
  9. @shared_task
  10. def identity(x):
  11. return x
  12. @shared_task
  13. def add(x, y):
  14. """Add two numbers."""
  15. return x + y
  16. @shared_task(ignore_result=True)
  17. def add_ignore_result(x, y):
  18. """Add two numbers."""
  19. return x + y
  20. @shared_task
  21. def chain_add(x, y):
  22. (
  23. add.s(x, x) | add.s(y)
  24. ).apply_async()
  25. @shared_task
  26. def delayed_sum(numbers, pause_time=1):
  27. """Sum the iterable of numbers."""
  28. # Allow the task to be in STARTED state for
  29. # a limited period of time.
  30. sleep(pause_time)
  31. return sum(numbers)
  32. @shared_task
  33. def delayed_sum_with_soft_guard(numbers, pause_time=1):
  34. """Sum the iterable of numbers."""
  35. try:
  36. sleep(pause_time)
  37. return sum(numbers)
  38. except SoftTimeLimitExceeded:
  39. return 0
  40. @shared_task
  41. def tsum(nums):
  42. """Sum an iterable of numbers"""
  43. return sum(nums)
  44. @shared_task(bind=True)
  45. def add_replaced(self, x, y):
  46. """Add two numbers (via the add task)."""
  47. raise self.replace(add.s(x, y))
  48. @shared_task(bind=True)
  49. def add_to_all(self, nums, val):
  50. """Add the given value to all supplied numbers."""
  51. subtasks = [add.s(num, val) for num in nums]
  52. raise self.replace(group(*subtasks))
  53. @shared_task(bind=True)
  54. def add_to_all_to_chord(self, nums, val):
  55. for num in nums:
  56. self.add_to_chord(add.s(num, val))
  57. return 0
  58. @shared_task(bind=True)
  59. def add_chord_to_chord(self, nums, val):
  60. subtasks = [add.s(num, val) for num in nums]
  61. self.add_to_chord(group(subtasks) | tsum.s())
  62. return 0
  63. @shared_task
  64. def print_unicode(log_message='hå它 valmuefrø', print_message='hiöäüß'):
  65. """Task that both logs and print strings containing funny characters."""
  66. logger.warning(log_message)
  67. print(print_message)
  68. @shared_task
  69. def sleeping(i, **_):
  70. """Task sleeping for ``i`` seconds, and returning nothing."""
  71. sleep(i)
  72. @shared_task(bind=True)
  73. def ids(self, i):
  74. """Returns a tuple of ``root_id``, ``parent_id`` and
  75. the argument passed as ``i``."""
  76. return self.request.root_id, self.request.parent_id, i
  77. @shared_task(bind=True)
  78. def collect_ids(self, res, i):
  79. """Used as a callback in a chain or group where the previous tasks
  80. are :task:`ids`: returns a tuple of::
  81. (previous_result, (root_id, parent_id, i))
  82. """
  83. return res, (self.request.root_id, self.request.parent_id, i)
  84. @shared_task(bind=True, expires=60.0, max_retries=1)
  85. def retry_once(self):
  86. """Task that fails and is retried. Returns the number of retries."""
  87. if self.request.retries:
  88. return self.request.retries
  89. raise self.retry(countdown=0.1)
  90. @shared_task
  91. def redis_echo(message):
  92. """Task that appends the message to a redis list"""
  93. redis_connection = get_redis_connection()
  94. redis_connection.rpush('redis-echo', message)
  95. @shared_task(bind=True)
  96. def second_order_replace1(self, state=False):
  97. redis_connection = get_redis_connection()
  98. if not state:
  99. redis_connection.rpush('redis-echo', 'In A')
  100. new_task = chain(second_order_replace2.s(),
  101. second_order_replace1.si(state=True))
  102. raise self.replace(new_task)
  103. else:
  104. redis_connection.rpush('redis-echo', 'Out A')
  105. @shared_task(bind=True)
  106. def second_order_replace2(self, state=False):
  107. redis_connection = get_redis_connection()
  108. if not state:
  109. redis_connection.rpush('redis-echo', 'In B')
  110. new_task = chain(redis_echo.s("In/Out C"),
  111. second_order_replace2.si(state=True))
  112. raise self.replace(new_task)
  113. else:
  114. redis_connection.rpush('redis-echo', 'Out B')
  115. @shared_task(bind=True)
  116. def build_chain_inside_task(self):
  117. """Task to build a chain.
  118. This task builds a chain and returns the chain's AsyncResult
  119. to verify that Asyncresults are correctly converted into
  120. serializable objects"""
  121. test_chain = (
  122. add.s(1, 1) |
  123. add.s(2) |
  124. group(
  125. add.s(3),
  126. add.s(4)
  127. ) |
  128. add.s(5)
  129. )
  130. result = test_chain()
  131. return result