task.py

  1. """
  2. Working with tasks and task sets.
  3. """
  4. from carrot.connection import DjangoAMQPConnection
  5. from celery.log import setup_logger
  6. from celery.registry import tasks
  7. from celery.messaging import TaskPublisher, TaskConsumer
  8. from celery.models import TaskMeta
  9. from django.core.cache import cache
  10. from datetime import timedelta
  11. from celery.backends import default_backend
  12. from celery.datastructures import PositionQueue
  13. from celery.result import AsyncResult
  14. from celery.timer import TimeoutTimer
  15. import uuid
  16. import pickle


def apply_async(task, args, kwargs, routing_key=None, immediate=None,
        mandatory=None, connect_timeout=None, priority=None):
    """Run a task asynchronously by the celery daemon(s).

    :param task: The task to run (a callable object, or a :class:`Task`
        instance).
    :param args: The positional arguments to pass on to the task (a ``list``).
    :param kwargs: The keyword arguments to pass on to the task (a ``dict``).
    :keyword routing_key: The routing key used to route the task to a worker
        server.
    :keyword immediate: Request immediate delivery. Will raise an exception
        if the task cannot be routed to a worker immediately.
    :keyword mandatory: Mandatory routing. Raises an exception if there are
        no running workers able to take on this task.
    :keyword connect_timeout: The timeout in seconds, before we give up
        on establishing a connection to the AMQP server.
    :keyword priority: The task priority, a number between ``0`` and ``9``.

    """
    message_opts = {"routing_key": routing_key,
                    "immediate": immediate,
                    "mandatory": mandatory,
                    "priority": priority}
    # Use the task's own attribute for each option when it defines one,
    # otherwise fall back to the keyword argument.
    for option_name, option_value in message_opts.items():
        message_opts[option_name] = getattr(task, option_name, option_value)

    amqp_connection = DjangoAMQPConnection(connect_timeout=connect_timeout)
    publisher = TaskPublisher(connection=amqp_connection)
    task_id = publisher.delay_task(task.name, args, kwargs, **message_opts)
    amqp_connection.close()
    return AsyncResult(task_id)
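

# A minimal usage sketch (not part of the original module), assuming a
# registered task class such as ``RefreshFeedTask``. Attributes defined on
# the task class (e.g. ``routing_key``) take precedence over the keyword
# defaults above.
# >>> result = apply_async(RefreshFeedTask,
# ...                      [], {"feed_url": "http://example.com/rss"},
# ...                      priority=5)
# >>> result.ready()       # True once a worker has run the task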


def delay_task(task_name, *args, **kwargs):
    """Delay a task for execution by the ``celery`` daemon.

    :param task_name: the name of a task registered in the task registry.
    :param \*args: positional arguments to pass on to the task.
    :param \*\*kwargs: keyword arguments to pass on to the task.

    :raises celery.registry.NotRegistered: exception if no such task
        has been registered in the task registry.

    :rtype: :class:`celery.result.AsyncResult`.

    Example

        >>> r = delay_task("update_record", name="George Constanza", age=32)
        >>> r.ready()
        True
        >>> r.result
        "Record was updated"

    """
    if task_name not in tasks:
        raise tasks.NotRegistered(
                "Task with name %s not registered in the task registry." % (
                    task_name))
    task = tasks[task_name]
    return apply_async(task, args, kwargs)


def discard_all():
    """Discard all waiting tasks.

    This will ignore all tasks waiting for execution, and they will
    be deleted from the messaging server.

    :returns: the number of tasks discarded.

    :rtype: int

    """
    amqp_connection = DjangoAMQPConnection()
    consumer = TaskConsumer(connection=amqp_connection)
    discarded_count = consumer.discard_all()
    amqp_connection.close()
    return discarded_count
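

# A hypothetical sketch: purge every task still waiting on the queue, e.g.
# before restarting the workers; the return value is how many were thrown
# away.
# >>> discard_all()
# 3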


def is_done(task_id):
    """Returns ``True`` if task with ``task_id`` has been executed.

    :rtype: bool

    """
    return default_backend.is_done(task_id)


class Task(object):
    """A task that can be delayed for execution by the ``celery`` daemon.

    All subclasses of :class:`Task` must define the :meth:`run` method,
    which is the actual method the ``celery`` daemon executes.

    The :meth:`run` method supports both positional and keyword arguments.

    .. attribute:: name

        *REQUIRED* All subclasses of :class:`Task` have to define the
        :attr:`name` attribute. This is the name of the task, registered
        in the task registry, and passed to :func:`delay_task`.

    .. attribute:: type

        The type of task, currently this can be ``regular`` or ``periodic``,
        however if you want a periodic task, you should subclass
        :class:`PeriodicTask` instead.

    :raises NotImplementedError: if the :attr:`name` attribute is not set.

    The resulting class is callable, which if called will apply the
    :meth:`run` method.

    Examples

    This is a simple task just logging a message,

        >>> from celery.task import tasks, Task
        >>> class MyTask(Task):
        ...     name = "mytask"
        ...
        ...     def run(self, some_arg=None, **kwargs):
        ...         logger = self.get_logger(**kwargs)
        ...         logger.info("Running MyTask with arg some_arg=%s" %
        ...                     some_arg)
        ...         return 42
        >>> tasks.register(MyTask)

    You can delay the task using the classmethod :meth:`delay`...

        >>> result = MyTask.delay(some_arg="foo")
        >>> result.status # after some time
        'DONE'
        >>> result.result
        42

    ...or using the :func:`delay_task` function, by passing the name of
    the task.

        >>> from celery.task import delay_task
        >>> result = delay_task(MyTask.name, some_arg="foo")

    """
    name = None
    type = "regular"
    max_retries = 0 # unlimited
    retry_interval = timedelta(seconds=2)
    auto_retry = False
    routing_key = None
    immediate = False
    mandatory = False

    def __init__(self):
        if not self.name:
            raise NotImplementedError("Tasks must define a name attribute.")

    def __call__(self, *args, **kwargs):
        return self.run(*args, **kwargs)

    def run(self, *args, **kwargs):
        """*REQUIRED* The actual task.

        All subclasses of :class:`Task` must define the run method.

        :raises NotImplementedError: by default, so you have to override
            this method in your subclass.

        """
        raise NotImplementedError("Tasks must define a run method.")

    def get_logger(self, **kwargs):
        """Get a process-aware logger object.

        See :func:`celery.log.setup_logger`.

        """
        return setup_logger(**kwargs)

    def get_publisher(self):
        """Get a celery task message publisher.

        :rtype: :class:`celery.messaging.TaskPublisher`.

        Please be sure to close the AMQP connection when you're done
        with this object, i.e.:

            >>> publisher = self.get_publisher()
            >>> # do something with publisher
            >>> publisher.connection.close()

        """
        return TaskPublisher(connection=DjangoAMQPConnection())

    def get_consumer(self):
        """Get a celery task message consumer.

        :rtype: :class:`celery.messaging.TaskConsumer`.

        Please be sure to close the AMQP connection when you're done
        with this object, i.e.:

            >>> consumer = self.get_consumer()
            >>> # do something with consumer
            >>> consumer.connection.close()

        """
        return TaskConsumer(connection=DjangoAMQPConnection())

    def requeue(self, task_id, args, kwargs):
        # Re-publish the task to the queue, keeping the same task_id.
        publisher = self.get_publisher()
        publisher.requeue_task(self.name, task_id, args, kwargs)
        publisher.connection.close()

    def retry(self, task_id, args, kwargs):
        retry_queue.put(self.name, task_id, args, kwargs)

    @classmethod
    def delay(cls, *args, **kwargs):
        """Delay this task for execution by the ``celery`` daemon(s).

        :param \*args: positional arguments passed on to the task.
        :param \*\*kwargs: keyword arguments passed on to the task.

        :rtype: :class:`celery.result.AsyncResult`

        See :func:`delay_task`.

        """
        return apply_async(cls, args, kwargs)

    @classmethod
    def apply_async(cls, args, kwargs, **options):
        """Delay this task for execution by the ``celery`` daemon(s).

        :param args: positional arguments passed on to the task.
        :param kwargs: keyword arguments passed on to the task.

        :rtype: :class:`celery.result.AsyncResult`

        See :func:`apply_async`.

        """
        return apply_async(cls, args, kwargs, **options)
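

# A minimal sketch (hypothetical ``FeedImportTask``) of how the class
# attributes above feed into :func:`apply_async`: ``routing_key``,
# ``immediate`` and ``mandatory`` set on the subclass are picked up via
# ``getattr()``, so every :meth:`delay` call publishes with them.
# >>> class FeedImportTask(Task):
# ...     name = "feed.import"
# ...     routing_key = "feed.import"
# ...     mandatory = True
# ...
# ...     def run(self, feed_url=None, **kwargs):
# ...         return feed_url
# >>> tasks.register(FeedImportTask)
# >>> result = FeedImportTask.delay(feed_url="http://example.com/rss")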


class TaskSet(object):
    """A task containing several subtasks, making it possible
    to track how many, or when all of the tasks have been completed.

    :param task: The task class or name.
        Can either be a fully qualified task name, or a task class.

    :param args: A list of ``[args, kwargs]`` pairs,
        e.g. ``[[args1, kwargs1], [args2, kwargs2], ..., [argsN, kwargsN]]``.

    .. attribute:: task_name

        The name of the task.

    .. attribute:: arguments

        The arguments, as passed to the task set constructor.

    .. attribute:: total

        Total number of tasks in this task set.

    Example

        >>> from djangofeeds.tasks import RefreshFeedTask
        >>> taskset = TaskSet(RefreshFeedTask, args=[
        ...     [[], {"feed_url": "http://cnn.com/rss"}],
        ...     [[], {"feed_url": "http://bbc.com/rss"}],
        ...     [[], {"feed_url": "http://xkcd.com/rss"}]])
        >>> taskset_id, subtask_ids = taskset.run()
        >>> list_of_return_values = taskset.join()

    """

    def __init__(self, task, args):
        try:
            task_name = task.name
        except AttributeError:
            task_name = task
        self.task_name = task_name
        self.arguments = args
        self.total = len(args)

    def run(self):
        """Run all tasks in the taskset.

        :returns: A tuple containing the taskset id, and a list
            of subtask ids.

        :rtype: tuple

        Example

            >>> ts = RefreshFeeds([
            ...     ["http://foo.com/rss", {}],
            ...     ["http://bar.com/rss", {}],
            ... ])
            >>> taskset_id, subtask_ids = ts.run()
            >>> taskset_id
            "d2c9b261-8eff-4bfb-8459-1e1b72063514"
            >>> subtask_ids
            ["b4996460-d959-49c8-aeb9-39c530dcde25",
             "598d2d18-ab86-45ca-8b4f-0779f5d6a3cb"]
            >>> time.sleep(10)
            >>> is_done(taskset_id)
            True

        """
        taskset_id = str(uuid.uuid4())
        amqp_connection = DjangoAMQPConnection()
        publisher = TaskPublisher(connection=amqp_connection)
        subtask_ids = []
        for arg, kwarg in self.arguments:
            subtask_id = publisher.delay_task_in_set(task_name=self.task_name,
                                                     taskset_id=taskset_id,
                                                     task_args=arg,
                                                     task_kwargs=kwarg)
            subtask_ids.append(subtask_id)
        amqp_connection.close()
        return taskset_id, subtask_ids

    def iterate(self):
        """Iterate over the results returned after calling :meth:`run`.

        If any of the tasks raises an exception, the exception will
        be re-raised.

        """
        taskset_id, subtask_ids = self.run()
        results = dict([(task_id, AsyncResult(task_id))
                            for task_id in subtask_ids])
        while results:
            for task_id, pending_result in results.items():
                if pending_result.status == "DONE":
                    del(results[task_id])
                    yield pending_result.result
                elif pending_result.status == "FAILURE":
                    raise pending_result.result
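
    # A minimal sketch (hypothetical feed URLs), consuming results as they
    # become available instead of blocking on :meth:`join`:
    # >>> ts = TaskSet(RefreshFeedTask, args=[
    # ...     [[], {"feed_url": "http://example.com/a.rss"}],
    # ...     [[], {"feed_url": "http://example.com/b.rss"}]])
    # >>> results = list(ts.iterate())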

    def join(self, timeout=None):
        """Gather the results for all of the tasks in the taskset,
        and return a list with them ordered in the order they were called.

        :keyword timeout: The time in seconds, how long
            it will wait for results, before the operation times out.

        :raises celery.timer.TimeoutError: if ``timeout`` is not ``None``
            and the operation takes longer than ``timeout`` seconds.

        If any of the tasks raises an exception, the exception
        will be re-raised by :meth:`join`.

        :returns: list of return values for all tasks in the taskset.

        """
        timeout_timer = TimeoutTimer(timeout) # Timeout timer starts here.
        taskset_id, subtask_ids = self.run()
        pending_results = map(AsyncResult, subtask_ids)
        results = PositionQueue(length=len(subtask_ids))

        while True:
            for position, pending_result in enumerate(pending_results):
                if pending_result.status == "DONE":
                    results[position] = pending_result.result
                elif pending_result.status == "FAILURE":
                    raise pending_result.result
            if results.full():
                # Make a list copy, so the returned type is not a
                # position queue.
                return list(results)
            # This raises TimeoutError when timed out.
            timeout_timer.tick()

    @classmethod
    def remote_execute(cls, func, args):
        """Apply ``args`` to function by distributing the args to the
        celery server(s)."""
        pickled = pickle.dumps(func)
        arguments = [[[pickled, arg, {}], {}] for arg in args]
        return cls(ExecuteRemoteTask, arguments)

    @classmethod
    def map(cls, func, args, timeout=None):
        """Distribute processing of the arguments and collect the results."""
        remote_task = cls.remote_execute(func, args)
        return remote_task.join(timeout=timeout)

    @classmethod
    def map_async(cls, func, args, timeout=None):
        """Distribute processing of the arguments and collect the results
        asynchronously.

        :returns: :class:`celery.result.AsyncResult` instance.

        """
        serfunc = pickle.dumps(func)
        return AsynchronousMapTask.delay(serfunc, args, timeout=timeout)


def dmap(func, args, timeout=None):
    """Distribute processing of the arguments and collect the results.

    Example

        >>> from celery.task import dmap
        >>> import operator
        >>> dmap(operator.add, [[2, 2], [4, 4], [8, 8]])
        [4, 8, 16]

    """
    return TaskSet.map(func, args, timeout=timeout)


class AsynchronousMapTask(Task):
    """Task used internally by :func:`dmap_async` and
    :meth:`TaskSet.map_async`."""
    name = "celery.map_async"

    def run(self, serfunc, args, **kwargs):
        timeout = kwargs.get("timeout")
        return TaskSet.map(pickle.loads(serfunc), args, timeout=timeout)
tasks.register(AsynchronousMapTask)


def dmap_async(func, args, timeout=None):
    """Distribute processing of the arguments and collect the results
    asynchronously.

    :returns: :class:`celery.result.AsyncResult` object.

    Example

        >>> from celery.task import dmap_async
        >>> import operator
        >>> presult = dmap_async(operator.add, [[2, 2], [4, 4], [8, 8]])
        >>> presult
        <AsyncResult: 373550e8-b9a0-4666-bc61-ace01fa4f91d>
        >>> presult.status
        'DONE'
        >>> presult.result
        [4, 8, 16]

    """
    return TaskSet.map_async(func, args, timeout=timeout)


class PeriodicTask(Task):
    """A periodic task is a task that behaves like a :manpage:`cron` job.

    .. attribute:: run_every

        *REQUIRED* Defines how often the task is run (its interval),
        it can be either a :class:`datetime.timedelta` object or an
        integer specifying the time in seconds.

    :raises NotImplementedError: if the :attr:`run_every` attribute is
        not defined.

    You have to register the periodic task in the task registry.

    Example

        >>> from celery.task import tasks, PeriodicTask
        >>> from datetime import timedelta
        >>> class MyPeriodicTask(PeriodicTask):
        ...     name = "my_periodic_task"
        ...     run_every = timedelta(seconds=30)
        ...
        ...     def run(self, **kwargs):
        ...         logger = self.get_logger(**kwargs)
        ...         logger.info("Running MyPeriodicTask")
        >>> tasks.register(MyPeriodicTask)

    """
    run_every = timedelta(days=1)
    type = "periodic"

    def __init__(self):
        if not self.run_every:
            raise NotImplementedError(
                    "Periodic tasks must have a run_every attribute")

        # If run_every is an integer, convert it to a timedelta in seconds.
        if isinstance(self.run_every, int):
            self.run_every = timedelta(seconds=self.run_every)

        super(PeriodicTask, self).__init__()


class ExecuteRemoteTask(Task):
    """Execute an arbitrary function or object.

    *Note* You probably want :func:`execute_remote` instead, which this
    is an internal component of.

    The object must be pickleable, so you can't use lambdas or functions
    defined in the REPL (that is the python shell, or ``ipython``).

    """
    name = "celery.execute_remote"

    def run(self, ser_callable, fargs, fkwargs, **kwargs):
        """
        :param ser_callable: A pickled function or callable object.
        :param fargs: Positional arguments to apply to the function.
        :param fkwargs: Keyword arguments to apply to the function.

        """
        callable_ = pickle.loads(ser_callable)
        return callable_(*fargs, **fkwargs)
tasks.register(ExecuteRemoteTask)


def execute_remote(func, *args, **kwargs):
    """Execute an arbitrary function or object remotely.

    :param func: A callable function or object.
    :param \*args: Positional arguments to apply to the function.
    :param \*\*kwargs: Keyword arguments to apply to the function.

    The object must be picklable, so you can't use lambdas or functions
    defined in the REPL (the objects must have an associated module).

    :returns: :class:`celery.result.AsyncResult`.

    """
    return ExecuteRemoteTask.delay(pickle.dumps(func), args, kwargs)
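

# A minimal sketch: run a picklable, importable function on a worker.
# The values shown assume a worker has already processed the task.
# >>> import operator
# >>> result = execute_remote(operator.add, 2, 2)
# >>> result.ready()
# True
# >>> result.result
# 4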


class DeleteExpiredTaskMetaTask(PeriodicTask):
    """A periodic task that deletes expired task metadata every day.

    This runs the current backend's
    :meth:`celery.backends.base.BaseBackend.cleanup` method.

    """
    name = "celery.delete_expired_task_meta"
    run_every = timedelta(days=1)

    def run(self, **kwargs):
        logger = self.get_logger(**kwargs)
        logger.info("Deleting expired task meta objects...")
        default_backend.cleanup()
tasks.register(DeleteExpiredTaskMetaTask)