tasks.py

# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals

from time import sleep

from celery import chain, group, shared_task
from celery.utils.log import get_task_logger

logger = get_task_logger(__name__)


@shared_task
def add(x, y):
    """Add two numbers."""
    return x + y
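
# Usage sketch (assumes a configured app, a result backend and a running worker):
#     add.delay(2, 3).get(timeout=10)  # -> 5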


@shared_task
def delayed_sum(numbers, pause_time=1):
    """Sum the iterable of numbers."""
    # Allow the task to be in STARTED state for
    # a limited period of time.
    sleep(pause_time)
    return sum(numbers)
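
# Sketch: with task_track_started enabled, the task stays in STARTED for roughly
# ``pause_time`` seconds before returning, e.g.:
#     delayed_sum.delay([1, 2, 3], pause_time=2)  # -> 6 after ~2 seconds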


@shared_task(bind=True)
def add_replaced(self, x, y):
    """Add two numbers (via the add task)."""
    raise self.replace(add.s(x, y))
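
# ``self.replace()`` substitutes the running task with ``add.s(x, y)``, so the
# caller's result resolves to the result of ``add`` (sketch, assumes a worker):
#     add_replaced.delay(2, 3).get(timeout=10)  # -> 5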


@shared_task(bind=True)
def add_to_all(self, nums, val):
    """Add the given value to all supplied numbers."""
    subtasks = [add.s(num, val) for num in nums]
    raise self.replace(group(*subtasks))
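
# Sketch: the task is replaced by ``group(add.s(n, val) for n in nums)``, so for
# nums=[1, 2, 3] and val=10 the group members produce 11, 12 and 13.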


@shared_task
def print_unicode(log_message='hå它 valmuefrø', print_message='hiöäüß'):
    """Task that both logs and prints strings containing funny characters."""
    logger.warning(log_message)
    print(print_message)


@shared_task
def sleeping(i, **_):
    """Task sleeping for ``i`` seconds, and returning nothing."""
    sleep(i)


@shared_task(bind=True)
def ids(self, i):
    """Returns a tuple of ``root_id``, ``parent_id`` and
    the argument passed as ``i``."""
    return self.request.root_id, self.request.parent_id, i


@shared_task(bind=True)
def collect_ids(self, res, i):
    """Used as a callback in a chain or group where the previous tasks
    are :task:`ids`: returns a tuple of::

        (previous_result, (root_id, parent_id, i))

    """
    return res, (self.request.root_id, self.request.parent_id, i)
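
# Sketch of chaining ``ids`` into ``collect_ids`` (assumes a running worker):
#     chain(ids.s(i=1), collect_ids.s(i=2)).delay().get(timeout=10)
# should return roughly ((root_id, parent_id, 1), (root_id, parent_id, 2)).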


@shared_task(bind=True, expires=60.0, max_retries=1)
def retry_once(self):
    """Task that fails and is retried. Returns the number of retries."""
    if self.request.retries:
        return self.request.retries
    raise self.retry(countdown=0.1)
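
# Sketch: the first run sees ``self.request.retries == 0`` and raises ``Retry``;
# the retried run returns 1:
#     retry_once.delay().get(timeout=10)  # -> 1 (assumes a running worker)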


@shared_task
def redis_echo(message):
    """Task that appends the message to a redis list."""
    from redis import StrictRedis
    redis_connection = StrictRedis()
    redis_connection.rpush('redis-echo', message)
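
# Assumes a Redis server reachable with the StrictRedis() defaults
# (localhost:6379, db 0); the echoed messages can be read back with, e.g.:
#     StrictRedis().lrange('redis-echo', 0, -1)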


@shared_task(bind=True)
def second_order_replace1(self, state=False):
    from redis import StrictRedis
    redis_connection = StrictRedis()
    if not state:
        redis_connection.rpush('redis-echo', 'In A')
        new_task = chain(second_order_replace2.s(),
                         second_order_replace1.si(state=True))
        raise self.replace(new_task)
    else:
        redis_connection.rpush('redis-echo', 'Out A')


@shared_task(bind=True)
def second_order_replace2(self, state=False):
    from redis import StrictRedis
    redis_connection = StrictRedis()
    if not state:
        redis_connection.rpush('redis-echo', 'In B')
        new_task = chain(redis_echo.s("In/Out C"),
                         second_order_replace2.si(state=True))
        raise self.replace(new_task)
    else:
        redis_connection.rpush('redis-echo', 'Out B')
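
# Sketch (assumes a worker and a local Redis server): second_order_replace1.delay()
# should leave the 'redis-echo' list reading
#     'In A', 'In B', 'In/Out C', 'Out B', 'Out A'
# because each task re-queues itself with state=True at the end of its
# replacement chain, deferring the "Out" push until the nested chain completes.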