
Merge branch ask/master

Mher Movsisyan 14 years ago
commit 004b9992a8
100 changed files with 4065 additions and 1850 deletions
  1. .gitignore (+3, -0)
  2. AUTHORS (+103, -75)
  3. Changelog (+867, -19)
  4. FAQ (+22, -20)
  5. LICENSE (+7, -9)
  6. MANIFEST.in (+2, -0)
  7. README.rst (+7, -4)
  8. TODO (+14, -0)
  9. celery/__init__.py (+14, -12)
  10. celery/actors.py (+28, -0)
  11. celery/app/__init__.py (+78, -34)
  12. celery/app/amqp.py (+33, -47)
  13. celery/app/annotations.py (+38, -0)
  14. celery/app/base.py (+68, -106)
  15. celery/app/defaults.py (+79, -27)
  16. celery/app/task/__init__.py (+57, -36)
  17. celery/apps/beat.py (+12, -10)
  18. celery/apps/worker.py (+51, -47)
  19. celery/backends/__init__.py (+15, -15)
  20. celery/backends/amqp.py (+39, -21)
  21. celery/backends/base.py (+91, -35)
  22. celery/backends/cache.py (+44, -9)
  23. celery/backends/cassandra.py (+21, -10)
  24. celery/backends/database.py (+31, -11)
  25. celery/backends/mongodb.py (+33, -13)
  26. celery/backends/pyredis.py (+15, -3)
  27. celery/backends/redis.py (+42, -27)
  28. celery/backends/tyrant.py (+12, -2)
  29. celery/beat.py (+53, -32)
  30. celery/bin/base.py (+60, -13)
  31. celery/bin/camqadm.py (+5, -2)
  32. celery/bin/celerybeat.py (+16, -7)
  33. celery/bin/celeryctl.py (+50, -6)
  34. celery/bin/celeryd.py (+6, -3)
  35. celery/bin/celeryd_detach.py (+17, -6)
  36. celery/bin/celeryd_multi.py (+45, -24)
  37. celery/bin/celeryev.py (+13, -4)
  38. celery/concurrency/__init__.py (+6, -3)
  39. celery/concurrency/base.py (+23, -10)
  40. celery/concurrency/eventlet.py (+17, -7)
  41. celery/concurrency/gevent.py (+7, -2)
  42. celery/concurrency/processes/__init__.py (+12, -11)
  43. celery/concurrency/processes/_win.py (+3, -2)
  44. celery/concurrency/processes/pool.py (+112, -46)
  45. celery/concurrency/solo.py (+7, -8)
  46. celery/concurrency/threads.py (+15, -1)
  47. celery/conf.py (+5, -2)
  48. celery/contrib/abortable.py (+4, -1)
  49. celery/contrib/batches.py (+31, -7)
  50. celery/contrib/rdb.py (+2, -0)
  51. celery/datastructures.py (+121, -100)
  52. celery/db/a805d4bd.py (+3, -0)
  53. celery/db/dfd042c7.py (+3, -0)
  54. celery/db/models.py (+12, -7)
  55. celery/db/session.py (+8, -2)
  56. celery/decorators.py (+5, -2)
  57. celery/events/__init__.py (+19, -5)
  58. celery/events/cursesmon.py (+19, -6)
  59. celery/events/dumper.py (+19, -5)
  60. celery/events/snapshot.py (+25, -11)
  61. celery/events/state.py (+42, -13)
  62. celery/exceptions.py (+29, -0)
  63. celery/execute/__init__.py (+4, -3)
  64. celery/execute/trace.py (+20, -8)
  65. celery/loaders/__init__.py (+18, -13)
  66. celery/loaders/app.py (+12, -1)
  67. celery/loaders/base.py (+35, -14)
  68. celery/loaders/default.py (+16, -4)
  69. celery/local.py (+23, -3)
  70. celery/log.py (+50, -34)
  71. celery/messaging.py (+12, -7)
  72. celery/platforms.py (+249, -35)
  73. celery/registry.py (+12, -2)
  74. celery/result.py (+38, -33)
  75. celery/routes.py (+15, -3)
  76. celery/schedules.py (+78, -63)
  77. celery/security/__init__.py (+72, -29)
  78. celery/security/certificate.py (+30, -22)
  79. celery/security/exceptions.py (+0, -5)
  80. celery/security/key.py (+13, -10)
  81. celery/security/serialization.py (+60, -32)
  82. celery/signals.py (+14, -268)
  83. celery/states.py (+2, -5)
  84. celery/task/__init__.py (+23, -12)
  85. celery/task/base.py (+17, -5)
  86. celery/task/chords.py (+35, -17)
  87. celery/task/control.py (+22, -5)
  88. celery/task/http.py (+34, -12)
  89. celery/task/schedules.py (+6, -4)
  90. celery/task/sets.py (+34, -22)
  91. celery/tests/__init__.py (+3, -0)
  92. celery/tests/compat.py (+2, -0)
  93. celery/tests/config.py (+6, -4)
  94. celery/tests/functional/case.py (+2, -0)
  95. celery/tests/functional/tasks.py (+2, -0)
  96. celery/tests/test_app/__init__.py (+324, -0)
  97. celery/tests/test_app/test_app.py (+0, -218)
  98. celery/tests/test_app/test_app_amqp.py (+90, -2)
  99. celery/tests/test_app/test_app_defaults.py (+36, -0)
  100. celery/tests/test_app/test_beat.py (+16, -15)

+ 3 - 0
.gitignore

@@ -17,3 +17,6 @@ erl_crash.dump
 Documentation/
 .tox/
 .ropeproject/
+.project
+.pydevproject
+

+ 103 - 75
AUTHORS

@@ -1,75 +1,103 @@
-Ordered by date of first contribution:
-  Ask Solem <ask@celeryproject.org>
-  Grégoire Cachet <gregoire@audacy.fr>
-  Vitaly Babiy <vbabiy86@gmail.com>
-  Brian Rosner <brosner@gmail.com>
-  Sean Creeley <sean.creeley@gmail.com>
-  Ben Firshman <ben@firshman.co.uk>
-  Augusto Becciu <augusto@becciu.org>
-  Jonatan Heyman <jonatan@heyman.info>
-  Mark Hellewell <mark.hellewell@gmail.com>
-  Jerzy Kozera <jerzy.kozera@gmail.com>
-  Brad Jasper <bjasper@gmail.com>
-  Wes Winham <winhamwr@gmail.com>
-  Timo Sugliani
-  Michael Elsdoerfer <michael@elsdoerfer.com>
-  Jason Baker <amnorvend@gmail.com>
-  Wes Turner <wes.turner@gmail.com>
-  Maxim Bodyansky <bodyansky@gmail.com>
-  Rune Halvorsen <runefh@gmail.com>
-  Aaron Ross <aaron@wawd.com>
-  Adam Endicott
-  Jesper Noehr <jesper@noehr.org>
-  Mark Stover <stovenator@gmail.com>
-  Andrew Watts <andrewwatts@gmail.com>
-  Felix Berger <bflat1@gmx.net
-  Reza Lotun <rlotun@gmail.com>
-  Mikhail Korobov <kmike84@gmail.com>
-  Jeff Balogh <me@jeffbalogh.org>
-  Patrick Altman <paltman@gmail.com>
-  Vincent Driessen <vincent@datafox.nl>
-  Hari <haridara@gmail.com>
-  Bartosz Ptaszynski
-  Marcin Lulek <info@webreactor.eu>
-  Honza Kral <honza.kral@gmail.com>
-  Jonas Haag <jonas@lophus.org>
-  Armin Ronacher <armin.ronacher@active-4.com>
-  Gunnlaugur Thor Briem <gunnlaugur@gmail.com>
-  Mikhail Gusarov <dottedmag@dottedmag.net>
-  Frédéric Junod <frederic.junod@camptocamp.com>
-  Lukas Linhart <lukas.linhart@centrumholdings.com>
-  Clay Gerrard
-  David Miller <il.livid.dream@gmail.com>
-  Juarez Bochi <jbochi@gmail.com>
-  Noah Kantrowitz <noah@coderanger.net>
-  Gert Van Gool <gertvangool@gmail.com>
-  sdcooke
-  David Cramer <dcramer@gmail.com>
-  Bryan Berg <bryan@mixedmedialabs.com>
-  Piotr Sikora <piotr.sikora@frickle.com>
-  Sam Cooke <sam@mixcloud.com>
-  John Watson <johnw@mahalo.com>
-  Martin Galpin <m@66laps.com>
-  Chris Rose <offby1@offby1.net>
-  Christopher Peplin <peplin@bueda.com>
-  David White <dpwhite2@ncsu.edu>
-  Vladimir Kryachko <vladimir.kryachko@etvnet.com>
-  Simon Josi <simon.josi@atizo.com>
-  jpellerin
-  Norman Richards <orb@nostacktrace.com>
-  Christoph Burgmer <christoph@nwebs.de>
-  Allan Caffee <allan.caffee@gmail.com>
-  Ales Zoulek <ales.zoulek@gmail.com>
-  Roberto Gaiser <gaiser@geekbunker.org>
-  Balachandran C <balachandran.c@gramvaani.org>
-  Kevin Tran <hekevintran@gmail.com>
-  Branko Čibej <brane@apache.org>
-  Jeff Terrace <jterrace@gmail.com>
-  Ryan Petrello <lists@ryanpetrello.com>
-  Marcin Kuźmiński <marcin@python-works.com>
-  Adriano Petrich <petrich@gmail.com>
-  David Strauss <david@davidstrauss.net>
-  David Arthur <mumrah@gmail.com>
-  Miguel Hernandez Martos <enlavin@gmail.com>
-  Jannis Leidel <jannis@leidel.info>
-  Harm Verhagen <harm.verhagen@gmail.com>
+=========
+ AUTHORS
+=========
+:order: sorted
+
+Aaron Ross <aaron@wawd.com>
+Adam Endicott
+Adriano Petrich <petrich@gmail.com>
+Ales Zoulek <ales.zoulek@gmail.com>
+Allan Caffee <allan.caffee@gmail.com>
+Andrew Watts <andrewwatts@gmail.com>
+Armin Ronacher <armin.ronacher@active-4.com>
+Ask Solem <ask@celeryproject.org>
+Augusto Becciu <augusto@becciu.org>
+Balachandran C <balachandran.c@gramvaani.org>
+Bartosz Ptaszynski
+Ben Firshman <ben@firshman.co.uk>
+Brad Jasper <bjasper@gmail.com>
+Branko Čibej <brane@apache.org>
+Brian Rosner <brosner@gmail.com>
+Bryan Berg <bryan@mixedmedialabs.com>
+Chase Seibert <chase.seibert+github@gmail.com>
+Chris Adams <chris@improbable.org>
+Chris Rose <offby1@offby1.net>
+Chris Streeter <chris@chrisstreeter.com>
+Christoph Burgmer <christoph@nwebs.de>
+Christopher Peplin <peplin@bueda.com>
+Clay Gerrard
+Dan McGee <dan@archlinux.org>
+Daniel Watkins <daniel@daniel-watkins.co.uk>
+David Arthur <mumrah@gmail.com>
+David Cramer <dcramer@gmail.com>
+David Miller <il.livid.dream@gmail.com>
+David Strauss <david@davidstrauss.net>
+David White <dpwhite2@ncsu.edu>
+Felix Berger <bflat1@gmx.net>
+Frédéric Junod <frederic.junod@camptocamp.com>
+Gert Van Gool <gertvangool@gmail.com>
+Greg Haskins <greg@greghaskins.com>
+Grégoire Cachet <gregoire@audacy.fr>
+Gunnlaugur Thor Briem <gunnlaugur@gmail.com>
+Hari <haridara@gmail.com>
+Harm Verhagen <harm.verhagen@gmail.com>
+Honza Kral <honza.kral@gmail.com>
+Ionel Maries Cristian <contact@ionelmc.ro>
+Ivan Metzlar <metzlar@gmail.com>
+Jannis Leidel <jannis@leidel.info>
+Jason Baker <amnorvend@gmail.com>
+Jeff Balogh <me@jeffbalogh.org>
+Jeff Terrace <jterrace@gmail.com>
+Jerzy Kozera <jerzy.kozera@gmail.com>
+Jesper Noehr <jesper@noehr.org>
+John Watson <johnw@mahalo.com>
+Jonas Haag <jonas@lophus.org>
+Jonatan Heyman <jonatan@heyman.info>
+Joshua Ginsberg <jag@flowtheory.net>
+Juan Ignacio Catalano <catalanojuan@gmail.com>
+Juarez Bochi <jbochi@gmail.com>
+Jude Nagurney <jude@pwan.org>
+Kevin Tran <hekevintran@gmail.com>
+Kornelijus Survila <kornholijo@gmail.com>
+Leo Dirac <leo@banyanbranch.com>
+Lukas Linhart <lukas.linhart@centrumholdings.com>
+Marcin Kuźmiński <marcin@python-works.com>
+Marcin Lulek <info@webreactor.eu>
+Mark Hellewell <mark.hellewell@gmail.com>
+Mark Stover <stovenator@gmail.com>
+Martin Galpin <m@66laps.com>
+Matthew J Morrison <mattj.morrison@gmail.com>
+Mauro Rocco <fireantology@gmail.com>
+Maxim Bodyansky <bodyansky@gmail.com>
+Michael Elsdoerfer <michael@elsdoerfer.com>
+Miguel Hernandez Martos <enlavin@gmail.com>
+Mikhail Gusarov <dottedmag@dottedmag.net>
+Mikhail Korobov <kmike84@gmail.com>
+Mitar <mitar@tnode.com>
+Neil Chintomby <neil@mochimedia.com>
+Noah Kantrowitz <noah@coderanger.net>
+Norman Richards <orb@nostacktrace.com>
+Patrick Altman <paltman@gmail.com>
+Piotr Sikora <piotr.sikora@frickle.com>
+Remy Noel <mocramis@gmail.com>
+Reza Lotun <rlotun@gmail.com>
+Roberto Gaiser <gaiser@geekbunker.org>
+Rune Halvorsen <runefh@gmail.com>
+Ryan P. Kelly <rpkelly@cpan.org>
+Ryan Petrello <lists@ryanpetrello.com>
+Sam Cooke <sam@mixcloud.com>
+Sean Creeley <sean.creeley@gmail.com>
+Simon Josi <simon.josi@atizo.com>
+Steeve Morin <steeve.morin@gmail.com>
+Stefan Kjartansson <esteban.supreme@gmail.com>
+Timo Sugliani
+Vincent Driessen <vincent@datafox.nl>
+Vitaly Babiy <vbabiy86@gmail.com>
+Vladimir Kryachko <vladimir.kryachko@etvnet.com>
+Wes Turner <wes.turner@gmail.com>
+Wes Winham <winhamwr@gmail.com>
+jpellerin
+kuno <neokuno@gmail.com>
+lookfwd <lookfwd@gmail.com>
+sdcooke
+Łukasz Oleś <lukaszoles@gmail.com>

+ 867 - 19
Changelog
(file diff suppressed because it is too large)


+ 22 - 20
FAQ

@@ -55,12 +55,12 @@ Is Celery dependent on pickle?
 
 **Answer:** No.
 
-Celery can support any serialization scheme and has support for JSON/YAML and
-Pickle by default. And as every task is associated with a content type, you
-can even send one task using pickle, and another using JSON.
+Celery can support any serialization scheme and has built-in support for
+JSON, YAML, Pickle and msgpack. Also, as every task is associated with a
+content type, you can even send one task using pickle, and another using JSON.
 
 The default serialization format is pickle simply because it is
-convenient as it supports sending complex Python objects as task arguments.
+convenient (it supports sending complex Python objects as task arguments).
 
 If you need to communicate with other languages you should change
 to a serialization format that is suitable for that.
@@ -114,7 +114,7 @@ Is Celery multilingual?
 
 **Answer:** Yes.
 
-:mod:`~celery.bin.celeryd` is an implementation of Celery in python. If the
+:mod:`~celery.bin.celeryd` is an implementation of Celery in Python. If the
 language has an AMQP client, there shouldn't be much work to create a worker
 in your language.  A Celery worker is just a program connecting to the broker
 to process messages.
@@ -159,6 +159,14 @@ celeryd is not doing anything, just hanging
 **Answer:** See `MySQL is throwing deadlock errors, what can I do?`_.
             or `Why is Task.delay/apply\* just hanging?`.
 
+.. _faq-results-unreliable:
+
+Task results aren't reliably returning
+--------------------------------------
+
+**Answer:** If you're using the database backend for results, and in particular
+using MySQL, see `MySQL is throwing deadlock errors, what can I do?`_.
+
 .. _faq-publish-hanging:
 
 Why is Task.delay/apply\*/celeryd just hanging?
@@ -172,8 +180,8 @@ most systems), it usually contains a message describing the reason.
 
 .. _faq-celeryd-on-freebsd:
 
-Why won't celeryd run on FreeBSD?
----------------------------------
+Does it work on FreeBSD?
+------------------------
 
 **Answer:** The multiprocessing pool requires a working POSIX semaphore
 implementation which isn't enabled in FreeBSD by default. You have to enable
@@ -244,7 +252,7 @@ other error is happening.
 
 .. _faq-periodic-task-does-not-run:
 
-Why won't my Periodic Task run?
+Why won't my periodic task run?
 -------------------------------
 
 **Answer:** See `Why won't my Task run?`_.
@@ -397,6 +405,10 @@ When running with the AMQP result backend, every task result will be sent
 as a message. If you don't collect these results, they will build up and
 RabbitMQ will eventually run out of memory.
 
+Results expire after 1 day by default.  It may be a good idea
+to lower this value by configuring the :setting:`CELERY_TASK_RESULT_EXPIRES`
+setting.
+
 If you don't use the results for a task, make sure you set the
 `ignore_result` option:
 
@@ -409,17 +421,6 @@ If you don't use the results for a task, make sure you set the
     class MyTask(Task):
         ignore_result = True
 
-Results can also be disabled globally using the
-:setting:`CELERY_IGNORE_RESULT` setting.
-
-.. note::
-
-    Celery version 2.1 added support for automatic expiration of
-    AMQP result backend results.
-
-    To use this you need to run RabbitMQ 2.1 or higher and enable
-    the :setting:`CELERY_AMQP_TASK_RESULT_EXPIRES` setting.
-
 .. _faq-use-celery-with-stomp:
 
 Can I use Celery with ActiveMQ/STOMP?
@@ -453,7 +454,8 @@ Tasks
 How can I reuse the same connection when applying tasks?
 --------------------------------------------------------
 
-**Answer**: See :ref:`executing-connections`.
+**Answer**: See the :setting:`BROKER_POOL_LIMIT` setting.
+The connection pool is enabled by default since version 2.5.
 
 .. _faq-execute-task-by-name:
 

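A minimal sketch of the settings and options the FAQ answers above refer to
(module and task names are illustrative, Celery 2.5-era configuration)::

    # celeryconfig.py
    CELERY_TASK_SERIALIZER = "json"      # default serializer for outgoing tasks
    CELERY_TASK_RESULT_EXPIRES = 3600    # expire results after 1 hour (default: 1 day)
    BROKER_POOL_LIMIT = 10               # broker connections kept in the pool

    # tasks.py
    from celery.task import task

    @task(ignore_result=True)
    def add(x, y):
        return x + y

    # The content type travels with each message, so one call can use
    # pickle while another uses JSON:
    add.apply_async(args=(2, 2), serializer="pickle")
    add.apply_async(args=(4, 4), serializer="json")
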
+ 7 - 9
LICENSE

@@ -1,23 +1,21 @@
-Copyright (c) 2009-2011, Ask Solem and contributors.
+Copyright (c) 2009-2011, Ask Solem & contributors.
 All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:
-
-    * Redistributions of source code must retain the above copyright notice,
-      this list of conditions and the following disclaimer.
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
     * Redistributions in binary form must reproduce the above copyright
       notice, this list of conditions and the following disclaimer in the
       documentation and/or other materials provided with the distribution.
-
-Neither the name of Ask Solem nor the names of its contributors may be used
-to endorse or promote products derived from this software without specific
-prior written permission.
+    * Neither the name of Ask Solem nor the
+      names of its contributors may be used to endorse or promote products
+      derived from this software without specific prior written permission.
 
 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ask Solem OR CONTRIBUTORS
 BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS

+ 2 - 0
MANIFEST.in

@@ -22,3 +22,5 @@ prune celery/*.pyc
 prune examples/*.pyc
 prune bin/*.pyc
 prune docs/.build
+prune docs/graffles
+prune .tox/*

+ 7 - 4
README.rst

@@ -2,9 +2,9 @@
  celery - Distributed Task Queue
 =================================
 
-.. image:: http://cloud.github.com/downloads/ask/celery/celery_favicon_128.png
+.. image:: http://cloud.github.com/downloads/ask/celery/celery_128.png
 
-:Version: 2.3.0rc1
+:Version: 2.5.0a1
 :Web: http://celeryproject.org/
 :Download: http://pypi.python.org/pypi/celery/
 :Source: http://github.com/ask/celery/
@@ -29,10 +29,11 @@ Celery is used in production systems to process millions of tasks a day.
 Celery is written in Python, but the protocol can be implemented in any
 language.  It can also `operate with other languages using webhooks`_.
 
-The recommended message broker is `RabbitMQ`_, but limited support for
+The recommended message broker is `RabbitMQ`_, but `limited support`_ for
 `Redis`_, `Beanstalk`_, `MongoDB`_, `CouchDB`_ and
 databases (using `SQLAlchemy`_ or the `Django ORM`_) is also available.
 
+
 Celery is easy to integrate with `Django`_, `Pylons`_ and `Flask`_, using
 the `django-celery`_, `celery-pylons`_ and `Flask-Celery`_ add-on packages.
 
@@ -53,6 +54,8 @@ the `django-celery`_, `celery-pylons`_ and `Flask-Celery`_ add-on packages.
 .. _`Flask-Celery`: http://github.com/ask/flask-celery/
 .. _`operate with other languages using webhooks`:
     http://ask.github.com/celery/userguide/remote-tasks.html
+.. _`limited support`:
+    http://kombu.readthedocs.org/en/latest/introduction.html#transport-comparison
 
 .. contents::
     :local:
@@ -253,7 +256,7 @@ Mailing list
 ------------
 
 For discussions about the usage, development, and future of celery,
-please join the `celery-users`_ mailing list. 
+please join the `celery-users`_ mailing list.
 
 .. _`celery-users`: http://groups.google.com/group/celery-users/
 

+ 14 - 0
TODO

@@ -1,2 +1,16 @@
 Please see our Issue Tracker at GitHub:
     http://github.com/ask/celery/issues
+
+Kombu
+=====
+
+Redis: Better acks by moving reserved messages into a temporary queue.
+       Can we use multi commands for anything?
+
+Celerymon
+=========
+
+Create a UI for celerymon using js, with statistics and so on.
+Needs a graphing library, maybe highcharts
+(http://www.highcharts.com/: awesome but weird license) or InfoVis
+(http://thejit.org/; BSD licensed).

+ 14 - 12
celery/__init__.py

@@ -1,33 +1,35 @@
+# -*- coding: utf-8 -*-
 """Distributed Task Queue"""
 # :copyright: (c) 2009 - 2011 by Ask Solem.
 # :license:   BSD, see LICENSE for more details.
 
-import os
-import sys
-
-VERSION = (2, 3, 0, "rc1")
+from __future__ import absolute_import
 
+VERSION = (2, 5, 0, "b1")
 __version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
 __author__ = "Ask Solem"
 __contact__ = "ask@celeryproject.org"
 __homepage__ = "http://celeryproject.org"
 __docformat__ = "restructuredtext"
 
+# -eof meta-
+
+import sys
+
 if sys.version_info < (2, 5):
     raise Exception(
         "Python 2.4 is not supported by this version. "
         "Please use Celery versions 2.1.x or earlier.")
 
+from .local import Proxy
+
 
 def Celery(*args, **kwargs):
-    from celery.app import App
+    from .app import App
     return App(*args, **kwargs)
 
-if not os.environ.get("CELERY_NO_EVAL", False):
-    from celery.local import LocalProxy
-
-    def _get_current_app():
-        from celery.app import current_app
-        return current_app()
 
-    current_app = LocalProxy(_get_current_app)
+def _get_current_app():
+    from .app import current_app
+    return current_app()
+current_app = Proxy(_get_current_app)

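``current_app`` is now exposed through ``celery.local.Proxy`` so that merely
importing ``celery`` never instantiates an app. A stripped-down sketch of the
proxy idea (the real ``Proxy`` in ``celery/local.py`` also forwards special
methods)::

    class Proxy(object):
        """Evaluate a callable lazily, on first attribute access."""

        def __init__(self, getter):
            self._getter = getter

        def __getattr__(self, name):
            # The target object is only looked up when actually used.
            return getattr(self._getter(), name)
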
+ 28 - 0
celery/actors.py

@@ -0,0 +1,28 @@
+from celery.app import app_or_default
+
+import cl
+import cl.presence
+
+
+def construct(cls, instance, connection=None, *args, **kwargs):
+    app = instance.app = app_or_default(kwargs.pop("app", None))
+    super(cls, instance).__init__(connection or app.broker_connection(),
+                                  *args, **kwargs)
+
+
+class Actor(cl.Actor):
+
+    def __init__(self, *args, **kwargs):
+        construct(Actor, self, *args, **kwargs)
+
+
+class Agent(cl.Agent):
+
+    def __init__(self, *args, **kwargs):
+        construct(Agent, self, *args, **kwargs)
+
+
+class AwareAgent(cl.presence.AwareAgent):
+
+    def __init__(self, *args, **kwargs):
+        construct(AwareAgent, self, *args, **kwargs)

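The ``construct`` helper above shares one initializer between classes whose
``cl`` base classes differ; passing the class explicitly keeps the ``super()``
call anchored at the right point of each MRO. The same pattern in isolation
(names are illustrative)::

    class Base(object):
        def __init__(self, connection):
            self.connection = connection

    def construct(cls, instance, connection=None):
        # Equivalent to calling super().__init__ inside cls.__init__.
        super(cls, instance).__init__(connection or "default-connection")

    class MyActor(Base):
        def __init__(self, **kwargs):
            construct(MyActor, self, **kwargs)

    assert MyActor().connection == "default-connection"
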
+ 78 - 34
celery/app/__init__.py

@@ -1,24 +1,28 @@
+# -*- coding: utf-8 -*-
 """
-celery.app
-==========
+    celery.app
+    ~~~~~~~~~~
 
-Celery Application.
+    Celery Application.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
 
 """
+
+from __future__ import absolute_import
+
 import os
 import threading
 
 from functools import wraps
 from inspect import getargspec
 
-from kombu.utils import cached_property
+from .. import registry
+from ..utils import cached_property, instantiate
 
-from celery import registry
-from celery.app import base
-from celery.utils import instantiate
+from . import annotations
+from . import base
 
 # Apps with the :attr:`~celery.app.base.BaseApp.set_as_current` attribute
 # sets this, so it will always contain the last instantiated app,
@@ -27,6 +31,35 @@ _tls = threading.local()
 _tls.current_app = None
 
 
+class AppPickler(object):
+
+    def __call__(self, cls, *args):
+        kwargs = self.build_kwargs(*args)
+        app = self.construct(cls, **kwargs)
+        self.prepare(app, **kwargs)
+        return app
+
+    def prepare(self, app, **kwargs):
+        app.conf.update(kwargs["changes"])
+
+    def build_kwargs(self, *args):
+        return self.build_standard_kwargs(*args)
+
+    def build_standard_kwargs(self, main, changes, loader, backend, amqp,
+            events, log, control, accept_magic_kwargs):
+        return dict(main=main, loader=loader, backend=backend, amqp=amqp,
+                    changes=changes, events=events, log=log, control=control,
+                    set_as_current=False,
+                    accept_magic_kwargs=accept_magic_kwargs)
+
+    def construct(self, cls, **kwargs):
+        return cls(**kwargs)
+
+
+def _unpickle_app(cls, pickler, *args):
+    return pickler()(cls, *args)
+
+
 class App(base.BaseApp):
     """Celery Application.
 
@@ -43,8 +76,10 @@ class App(base.BaseApp):
     :keyword set_as_current:  Make this the global current app.
 
     """
+    Pickler = AppPickler
 
     def set_current(self):
+        """Make this the current app for this thread."""
         _tls.current_app = self
 
     def on_init(self):
@@ -56,7 +91,7 @@ class App(base.BaseApp):
         taken from this app."""
         conf = self.conf
 
-        from celery.app.task import BaseTask
+        from .task import BaseTask
 
         class Task(BaseTask):
             abstract = True
@@ -82,20 +117,23 @@ class App(base.BaseApp):
         """Create new :class:`~celery.apps.worker.Worker` instance."""
         return instantiate("celery.apps.worker.Worker", app=self, **kwargs)
 
+    def WorkController(self, **kwargs):
+        return instantiate("celery.worker.WorkController", app=self, **kwargs)
+
     def Beat(self, **kwargs):
         """Create new :class:`~celery.apps.beat.Beat` instance."""
         return instantiate("celery.apps.beat.Beat", app=self, **kwargs)
 
     def TaskSet(self, *args, **kwargs):
         """Create new :class:`~celery.task.sets.TaskSet`."""
-        from celery.task.sets import TaskSet
+        from ..task.sets import TaskSet
         kwargs["app"] = self
         return TaskSet(*args, **kwargs)
 
     def worker_main(self, argv=None):
         """Run :program:`celeryd` using `argv`.  Uses :data:`sys.argv`
         if `argv` is not specified."""
-        from celery.bin.celeryd import WorkerCommand
+        from ..bin.celeryd import WorkerCommand
         return WorkerCommand(app=self).execute_from_commandline(argv)
 
     def task(self, *args, **options):
@@ -157,11 +195,24 @@ class App(base.BaseApp):
             return inner_create_task_cls(**options)(*args)
         return inner_create_task_cls(**options)
 
+    def annotate_task(self, task):
+        if self.annotations:
+            match = annotations._first_match(self.annotations, task)
+            for attr, value in (match or {}).iteritems():
+                setattr(task, attr, value)
+            match_any = annotations._first_match_any(self.annotations)
+            for attr, value in (match_any or {}).iteritems():
+                setattr(task, attr, value)
+
     @cached_property
     def Task(self):
         """Default Task base class for this application."""
         return self.create_task_cls()
 
+    @cached_property
+    def annotations(self):
+        return annotations.prepare(self.conf.CELERY_ANNOTATIONS)
+
     def __repr__(self):
         return "<Celery: %s:0x%x>" % (self.main or "__main__", id(self), )
 
@@ -169,26 +220,19 @@ class App(base.BaseApp):
         # Reduce only pickles the configuration changes,
         # so the default configuration doesn't have to be passed
         # between processes.
-        return (_unpickle_app, (self.__class__,
-                                self.main,
-                                self.conf.changes,
-                                self.loader_cls,
-                                self.backend_cls,
-                                self.amqp_cls,
-                                self.events_cls,
-                                self.log_cls,
-                                self.control_cls,
-                                self.accept_magic_kwargs))
-
-
-def _unpickle_app(cls, main, changes, loader, backend, amqp,
-        events, log, control, accept_magic_kwargs):
-    app = cls(main, loader=loader, backend=backend, amqp=amqp,
-                    events=events, log=log, control=control,
-                    set_as_current=False,
-                    accept_magic_kwargs=accept_magic_kwargs)
-    app.conf.update(changes)
-    return app
+        return (_unpickle_app, (self.__class__, self.Pickler)
+                              + self.__reduce_args__())
+
+    def __reduce_args__(self):
+        return (self.main,
+                self.conf.changes,
+                self.loader_cls,
+                self.backend_cls,
+                self.amqp_cls,
+                self.events_cls,
+                self.log_cls,
+                self.control_cls,
+                self.accept_magic_kwargs)
 
 
 #: The "default" loader is the default loader used by old applications.
@@ -221,12 +265,12 @@ def _app_or_default_trace(app=None):  # pragma: no cover
     from multiprocessing import current_process
     if app is None:
         if getattr(_tls, "current_app", None):
-            print("-- RETURNING TO CURRENT APP --")
+            print("-- RETURNING TO CURRENT APP --")  # noqa+
             print_stack()
             return _tls.current_app
         if current_process()._name == "MainProcess":
             raise Exception("DEFAULT APP")
-        print("-- RETURNING TO DEFAULT APP --")
+        print("-- RETURNING TO DEFAULT APP --")      # noqa+
         print_stack()
         return default_app
     return app

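With ``AppPickler`` factored out, subclasses of ``App`` can customize
unpickling by overriding the ``Pickler`` attribute, and only configuration
*changes* cross process boundaries. A round-trip sketch (assuming a picklable
default configuration)::

    import pickle
    from celery import Celery

    app = Celery("myapp", set_as_current=False)
    app.conf.update(CELERY_ALWAYS_EAGER=True)   # a local configuration change

    app2 = pickle.loads(pickle.dumps(app))      # goes through AppPickler
    assert app2.conf.CELERY_ALWAYS_EAGER        # only the change was pickled
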
+ 33 - 47
celery/app/amqp.py

@@ -1,25 +1,25 @@
 # -*- coding: utf-8 -*-
 """
-celery.app.amqp
-===============
+    celery.app.amqp
+    ~~~~~~~~~~~~~~~
 
-AMQ related functionality.
+    AMQ related functionality.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
 
 """
+from __future__ import absolute_import
+
 from datetime import datetime, timedelta
 
 from kombu import BrokerConnection, Exchange
-from kombu.connection import Resource
 from kombu import compat as messaging
-from kombu.utils import cached_property
+from kombu.pools import ProducerPool
 
-from celery import routes as _routes
-from celery import signals
-from celery.utils import gen_unique_id, textindent
-from celery.utils import promise, maybe_promise
+from .. import routes as _routes
+from .. import signals
+from ..utils import cached_property, textindent, uuid
 
 #: List of known options to a Kombu producers send method.
 #: Used to extract the message related options out of any `dict`.
@@ -90,6 +90,8 @@ class Queues(dict):
     def format(self, indent=0, indent_first=True):
         """Format routing table into string for log dumps."""
         active = self.consume_from
+        if not active:
+            return ""
         info = [QUEUE_FORMAT.strip() % dict(
                     name=(name + ":").ljust(12), **config)
                         for name, config in sorted(active.iteritems())]
@@ -132,6 +134,8 @@ class Queues(dict):
     def with_defaults(cls, queues, default_exchange, default_exchange_type):
         """Alternate constructor that adds default exchange and
         exchange type information to queues that does not have any."""
+        if queues is None:
+            queues = {}
         for opts in queues.values():
             opts.setdefault("exchange", default_exchange),
             opts.setdefault("exchange_type", default_exchange_type)
@@ -150,6 +154,7 @@ class TaskPublisher(messaging.Publisher):
         self.retry = kwargs.pop("retry", self.retry)
         self.retry_policy = kwargs.pop("retry_policy",
                                         self.retry_policy or {})
+        self.utc = kwargs.pop("enable_utc", False)
         super(TaskPublisher, self).__init__(*args, **kwargs)
 
     def declare(self):
@@ -196,7 +201,7 @@ class TaskPublisher(messaging.Publisher):
                     exchange_type or self.exchange_type, retry, _retry_policy)
             _exchanges_declared.add(exchange)
 
-        task_id = task_id or gen_unique_id()
+        task_id = task_id or uuid()
         task_args = task_args or []
         task_kwargs = task_kwargs or {}
         if not isinstance(task_args, (list, tuple)):
@@ -204,10 +209,10 @@ class TaskPublisher(messaging.Publisher):
         if not isinstance(task_kwargs, dict):
             raise ValueError("task kwargs must be a dictionary")
         if countdown:                           # Convert countdown to ETA.
-            now = now or datetime.now()
+            now = now or datetime.utcnow()
             eta = now + timedelta(seconds=countdown)
         if isinstance(expires, int):
-            now = now or datetime.now()
+            now = now or datetime.utcnow()
             expires = now + timedelta(seconds=expires)
         eta = eta and eta.isoformat()
         expires = expires and expires.isoformat()
@@ -218,15 +223,16 @@
                 "kwargs": task_kwargs or {},
                 "retries": retries or 0,
                 "eta": eta,
-                "expires": expires}
-
+                "expires": expires,
+                "utc": self.utc}
         if taskset_id:
             body["taskset"] = taskset_id
         if chord:
             body["chord"] = chord
 
+        do_retry = retry if retry is not None else self.retry
         send = self.send
-        if retry is None and self.retry or retry:
+        if do_retry:
             send = connection.ensure(self, self.send, **_retry_policy)
         send(body, exchange=exchange, **extract_msg_options(kwargs))
         signals.task_sent.send(sender=task_name, **body)
@@ -247,40 +253,19 @@ class TaskPublisher(messaging.Publisher):
             self.close()
 
 
-class PublisherPool(Resource):
+class PublisherPool(ProducerPool):
 
-    def __init__(self, app=None):
+    def __init__(self, app):
         self.app = app
-        super(PublisherPool, self).__init__(limit=self.app.pool.limit)
+        super(PublisherPool, self).__init__(self.app.pool,
+                                            limit=self.app.pool.limit)
 
-    def create_publisher(self):
-        conn = self.app.pool.acquire(block=True)
+    def create_producer(self):
+        conn = self.connections.acquire(block=True)
         pub = self.app.amqp.TaskPublisher(conn, auto_declare=False)
-        conn._publisher_chan = pub.channel
+        conn._producer_chan = pub.channel
         return pub
 
-    def new(self):
-        return promise(self.create_publisher)
-
-    def setup(self):
-        if self.limit:
-            for _ in xrange(self.limit):
-                self._resource.put_nowait(self.new())
-
-    def prepare(self, publisher):
-        pub = maybe_promise(publisher)
-        if not pub.connection:
-            pub.connection = self.app.pool.acquire(block=True)
-            if not getattr(pub.connection, "_publisher_chan", None):
-                pub.connection._publisher_chan = pub.connection.channel()
-            pub.revive(pub.connection._publisher_chan)
-        return pub
-
-    def release(self, resource):
-        resource.connection.release()
-        resource.connection = None
-        super(PublisherPool, self).release(resource)
-
 
 class AMQP(object):
     BrokerConnection = BrokerConnection
@@ -301,7 +286,7 @@ class AMQP(object):
         """Create new :class:`Queues` instance, using queue defaults
         from the current configuration."""
         conf = self.app.conf
-        if not queues:
+        if not queues and conf.CELERY_DEFAULT_QUEUE:
             queues = {conf.CELERY_DEFAULT_QUEUE: {
                         "exchange": conf.CELERY_DEFAULT_EXCHANGE,
                         "exchange_type": conf.CELERY_DEFAULT_EXCHANGE_TYPE,
@@ -337,6 +322,7 @@ class AMQP(object):
                     "serializer": conf.CELERY_TASK_SERIALIZER,
                     "retry": conf.CELERY_TASK_PUBLISH_RETRY,
                     "retry_policy": conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
+                    "enable_utc": conf.CELERY_ENABLE_UTC,
                     "app": self}
         return TaskPublisher(*args, **self.app.merge(defaults, kwargs))
 
@@ -366,4 +352,4 @@ class AMQP(object):
 
     @cached_property
     def publisher_pool(self):
-        return PublisherPool(app=self.app)
+        return PublisherPool(self.app)

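``delay_task`` above now derives ``eta`` and ``expires`` from
``datetime.utcnow()`` and tags each message body with a ``utc`` flag, so
workers can distinguish UTC timestamps from naive local ones. The conversion
in isolation::

    from datetime import datetime, timedelta

    countdown = 10                                   # seconds from now
    now = datetime.utcnow()
    eta = (now + timedelta(seconds=countdown)).isoformat()
    expires = (now + timedelta(seconds=3600)).isoformat()
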
+ 38 - 0
celery/app/annotations.py

@@ -0,0 +1,38 @@
+from __future__ import absolute_import
+
+from ..utils import firstmethod, instantiate, mpromise
+
+_first_match = firstmethod("annotate")
+_first_match_any = firstmethod("annotate_any")
+
+
+class MapAnnotation(dict):
+
+    def annotate_any(self):
+        try:
+            return dict(self["*"])
+        except KeyError:
+            pass
+
+    def annotate(self, task):
+        try:
+            return dict(self[task.name])
+        except KeyError:
+            pass
+
+
+def prepare(annotations):
+    """Expands the :setting:`CELERY_ANNOTATIONS` setting."""
+
+    def expand_annotation(annotation):
+        if isinstance(annotation, dict):
+            return MapAnnotation(annotation)
+        elif isinstance(annotation, basestring):
+            return mpromise(instantiate, annotation)
+        return annotation
+
+    if annotations is None:
+        return ()
+    elif not isinstance(annotations, (list, tuple)):
+        annotations = (annotations, )
+    return map(expand_annotation, annotations)

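``prepare`` accepts a single dict (wrapped in ``MapAnnotation``), a list of
annotation objects, or a string path instantiated lazily. Matching the
``MapAnnotation`` lookups above, a configuration sketch (the task name is
illustrative)::

    # Annotate one task by name:
    CELERY_ANNOTATIONS = {"tasks.add": {"rate_limit": "10/s"}}

    # Or annotate every task with the special "*" key:
    CELERY_ANNOTATIONS = {"*": {"rate_limit": "10/s"}}
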
+ 68 - 106
celery/app/base.py

@@ -1,28 +1,31 @@
+# -*- coding: utf-8 -*-
 """
-celery.app.base
-===============
+    celery.app.base
+    ~~~~~~~~~~~~~~~
 
-Application Base Class.
+    Application Base Class.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
 
 """
-from __future__ import absolute_import, with_statement
+from __future__ import absolute_import
+from __future__ import with_statement
 
+import os
 import platform as _platform
-import sys
 
 from contextlib import contextmanager
 from copy import deepcopy
 from functools import wraps
-from threading import Lock
 
-from kombu.utils import cached_property
+from kombu.clocks import LamportClock
 
-from celery import datastructures
-from celery.app.defaults import DEFAULTS
-from celery.utils import instantiate, lpmerge
+from .. import datastructures
+from .. import platforms
+from ..utils import cached_property, instantiate, lpmerge
+
+from .defaults import DEFAULTS, find_deprecated_settings
 
 import kombu
 if kombu.VERSION < (1, 1, 0):
@@ -35,70 +38,6 @@ settings -> transport:%(transport)s results:%(results)s
 """
 
 
-def pyimplementation():
-    if hasattr(_platform, "python_implementation"):
-        return _platform.python_implementation()
-    elif sys.platform.startswith("java"):
-        return "Jython %s" % (sys.platform, )
-    elif hasattr(sys, "pypy_version_info"):
-        v = ".".join(map(str, sys.pypy_version_info[:3]))
-        if sys.pypy_version_info[3:]:
-            v += "-" + "".join(map(str, sys.pypy_version_info[3:]))
-        return "PyPy %s" % (v, )
-    else:
-        return "CPython"
-
-
-class LamportClock(object):
-    """Lamports logical clock.
-
-    From Wikipedia:
-
-    "A Lamport logical clock is a monotonically incrementing software counter
-    maintained in each process.  It follows some simple rules:
-
-        * A process increments its counter before each event in that process;
-        * When a process sends a message, it includes its counter value with
-          the message;
-        * On receiving a message, the receiver process sets its counter to be
-          greater than the maximum of its own value and the received value
-          before it considers the message received.
-
-    Conceptually, this logical clock can be thought of as a clock that only
-    has meaning in relation to messages moving between processes.  When a
-    process receives a message, it resynchronizes its logical clock with
-    the sender.
-
-    .. seealso::
-
-        http://en.wikipedia.org/wiki/Lamport_timestamps
-        http://en.wikipedia.org/wiki/Lamport's_Distributed_
-            Mutual_Exclusion_Algorithm
-
-    *Usage*
-
-    When sending a message use :meth:`forward` to increment the clock,
-    when receiving a message use :meth:`adjust` to sync with
-    the timestamp of the incoming message.
-
-    """
-    #: The clocks current value.
-    value = 0
-
-    def __init__(self, initial_value=0):
-        self.value = initial_value
-        self.mutex = Lock()
-
-    def adjust(self, other):
-        with self.mutex:
-            self.value = max(self.value, other) + 1
-
-    def forward(self):
-        with self.mutex:
-            self.value += 1
-        return self.value
-
-
 class Settings(datastructures.ConfigurationView):
 
     @property
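
The ``LamportClock`` implementation removed above now ships with Kombu and is
imported as ``from kombu.clocks import LamportClock``; the API is unchanged.
Usage per the removed docstring::

    from kombu.clocks import LamportClock

    clock = LamportClock()

    # Sender: increment the clock before stamping an outgoing message.
    timestamp = clock.forward()

    # Receiver: merge the received timestamp into the local clock.
    clock.adjust(timestamp)
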
@@ -119,17 +58,24 @@ class Settings(datastructures.ConfigurationView):
         """Deprecated compat alias to :attr:`BROKER_TRANSPORT`."""
         return self.BROKER_TRANSPORT
 
+    @property
+    def BROKER_HOST(self):
+
+        return (os.environ.get("CELERY_BROKER_URL") or
+                self.get("BROKER_URL") or
+                self.get("BROKER_HOST"))
+
 
 class BaseApp(object):
     """Base class for apps."""
-    SYSTEM = _platform.system()
-    IS_OSX = SYSTEM == "Darwin"
-    IS_WINDOWS = SYSTEM == "Windows"
+    SYSTEM = platforms.SYSTEM
+    IS_OSX = platforms.IS_OSX
+    IS_WINDOWS = platforms.IS_WINDOWS
 
     amqp_cls = "celery.app.amqp.AMQP"
     backend_cls = None
     events_cls = "celery.events.Events"
-    loader_cls = "app"
+    loader_cls = "celery.loaders.app.AppLoader"
     log_cls = "celery.log.Logging"
     control_cls = "celery.task.control.Control"
 
@@ -137,7 +83,7 @@ class BaseApp(object):
 
     def __init__(self, main=None, loader=None, backend=None,
             amqp=None, events=None, log=None, control=None,
-            set_as_current=True, accept_magic_kwargs=False):
+            set_as_current=True, accept_magic_kwargs=False, **kwargs):
         self.main = main
         self.amqp_cls = amqp or self.amqp_cls
         self.backend_cls = backend or self.backend_cls
@@ -147,16 +93,17 @@ class BaseApp(object):
         self.control_cls = control or self.control_cls
         self.set_as_current = set_as_current
         self.accept_magic_kwargs = accept_magic_kwargs
-        self.on_init()
         self.clock = LamportClock()
 
+        self.on_init()
+
     def on_init(self):
         """Called at the end of the constructor."""
         pass
 
     def config_from_object(self, obj, silent=False):
         """Read configuration from object, where object is either
-        a real object, or the name of an object to import.
+        an object, or the name of a module to import.
 
             >>> celery.config_from_object("myapp.celeryconfig")
 
@@ -171,7 +118,7 @@
         """Read configuration from environment variable.
 
         The value of the environment variable must be the name
-        of an object to import.
+        of a module to import.
 
             >>> os.environ["CELERY_CONFIG_MODULE"] = "myapp.celeryconfig"
             >>> celery.config_from_envvar("CELERY_CONFIG_MODULE")
@@ -226,19 +173,19 @@
 
     def AsyncResult(self, task_id, backend=None, task_name=None):
         """Create :class:`celery.result.BaseAsyncResult` instance."""
-        from celery.result import BaseAsyncResult
-        return BaseAsyncResult(task_id, app=self,
-                               task_name=task_name,
+        from ..result import BaseAsyncResult
+        return BaseAsyncResult(task_id, app=self, task_name=task_name,
                                backend=backend or self.backend)
 
     def TaskSetResult(self, taskset_id, results, **kwargs):
         """Create :class:`celery.result.TaskSetResult` instance."""
-        from celery.result import TaskSetResult
+        from ..result import TaskSetResult
         return TaskSetResult(taskset_id, results, app=self)
 
     def broker_connection(self, hostname=None, userid=None,
             password=None, virtual_host=None, port=None, ssl=None,
-            insist=None, connect_timeout=None, transport=None, **kwargs):
+            insist=None, connect_timeout=None, transport=None,
+            transport_options=None, **kwargs):
         """Establish a connection to the message broker.
 
         :keyword hostname: defaults to the :setting:`BROKER_HOST` setting.
@@ -256,23 +203,31 @@ class BaseApp(object):
         :returns :class:`kombu.connection.BrokerConnection`:
         :returns :class:`kombu.connection.BrokerConnection`:
 
 
         """
         """
+        conf = self.conf
         return self.amqp.BrokerConnection(
         return self.amqp.BrokerConnection(
-                    hostname or self.conf.BROKER_HOST,
-                    userid or self.conf.BROKER_USER,
-                    password or self.conf.BROKER_PASSWORD,
-                    virtual_host or self.conf.BROKER_VHOST,
-                    port or self.conf.BROKER_PORT,
-                    transport=transport or self.conf.BROKER_TRANSPORT,
+                    hostname or conf.BROKER_HOST,
+                    userid or conf.BROKER_USER,
+                    password or conf.BROKER_PASSWORD,
+                    virtual_host or conf.BROKER_VHOST,
+                    port or conf.BROKER_PORT,
+                    transport=transport or conf.BROKER_TRANSPORT,
                     insist=self.either("BROKER_INSIST", insist),
                     insist=self.either("BROKER_INSIST", insist),
                     ssl=self.either("BROKER_USE_SSL", ssl),
                     ssl=self.either("BROKER_USE_SSL", ssl),
                     connect_timeout=self.either(
                     connect_timeout=self.either(
                                 "BROKER_CONNECTION_TIMEOUT", connect_timeout),
                                 "BROKER_CONNECTION_TIMEOUT", connect_timeout),
-                    transport_options=self.conf.BROKER_TRANSPORT_OPTIONS)
+                    transport_options=dict(conf.BROKER_TRANSPORT_OPTIONS,
+                                           **transport_options or {}))
 
 
     @contextmanager
     def default_connection(self, connection=None, connect_timeout=None):
         """For use within a with-statement to get a connection from the pool
-        if one is not already provided."""
+        if one is not already provided.
+
+        :keyword connection: If not provided, then a connection will be
+                             acquired from the connection pool.
+        :keyword connect_timeout: *No longer used.*
+
+        """
         if connection:
             yield connection
         else:
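Past the else branch (the next hunk picks up mid-function) a connection is acquired from the pool. A self-contained sketch of that shape, assuming a kombu-style pool with acquire()/release():

    from contextlib import contextmanager

    @contextmanager
    def default_connection(pool, connection=None):
        if connection is not None:
            yield connection              # caller owns this connection
        else:
            conn = pool.acquire(block=True)
            try:
                yield conn
            finally:
                conn.release()            # always hand it back
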
@@ -295,13 +250,13 @@ class BaseApp(object):
         @wraps(fun)
         def _inner(*args, **kwargs):
             connection = kwargs.pop("connection", None)
-            connect_timeout = kwargs.get("connect_timeout")
-            with self.default_connection(connection, connect_timeout) as c:
+            with self.default_connection(connection) as c:
                 return fun(*args, **dict(kwargs, connection=c))
         return _inner

     def prepare_config(self, c):
         """Prepare configuration before it is merged with the defaults."""
+        find_deprecated_settings(c)
         return c

     def mail_admins(self, subject, body, fail_silently=False):
@@ -314,7 +269,14 @@ class BaseApp(object):
                                        port=self.conf.EMAIL_PORT,
                                        user=self.conf.EMAIL_HOST_USER,
                                        password=self.conf.EMAIL_HOST_PASSWORD,
-                                       timeout=self.conf.EMAIL_TIMEOUT)
+                                       timeout=self.conf.EMAIL_TIMEOUT,
+                                       use_ssl=self.conf.EMAIL_USE_SSL,
+                                       use_tls=self.conf.EMAIL_USE_TLS)
+
+    def select_queues(self, queues=None):
+        if queues is not None:
+            return self.amqp.queues.select_subset(queues,
+                  self.conf.CELERY_CREATE_MISSING_QUEUES)

     def either(self, default_key, *values):
         """Fallback to the value of a configuration key if none of the
@@ -330,7 +292,7 @@ class BaseApp(object):
         return lpmerge(l, r)

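either() returns the first given value that is not None, and only then falls back to the named configuration key. A configuration-free stand-in for the lookup order:

    def first_not_none(values, default):
        for value in values:
            if value is not None:
                return value
        return default

    assert first_not_none([None, 0], default=4) == 0   # 0 is a real value
    assert first_not_none([None, None], default=4) == 4
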
     def _get_backend(self):
-        from celery.backends import get_backend_cls
+        from ..backends import get_backend_cls
         backend_cls = self.backend_cls or self.conf.CELERY_RESULT_BACKEND
         backend_cls = get_backend_cls(backend_cls, loader=self.loader)
         return backend_cls(app=self)
@@ -349,7 +311,7 @@ class BaseApp(object):
         import kombu
         return BUGREPORT_INFO % {"system": _platform.system(),
                                  "arch": _platform.architecture(),
-                                 "py_i": pyimplementation(),
+                                 "py_i": platforms.pyimplementation(),
                                  "celery_v": celery.__version__,
                                  "kombu_v": kombu.__version__,
                                  "py_v": _platform.python_version(),
@@ -364,8 +326,8 @@ class BaseApp(object):
                 register_after_fork(self, self._after_fork)
             except ImportError:
                 pass
-            self._pool = self.broker_connection().Pool(
-                            self.conf.BROKER_POOL_LIMIT)
+            limit = self.conf.BROKER_POOL_LIMIT
+            self._pool = self.broker_connection().Pool(limit)
         return self._pool

     @cached_property
@@ -375,7 +337,7 @@ class BaseApp(object):

     @cached_property
     def backend(self):
-        """Storing/retreiving task state.  See
+        """Storing/retrieving task state.  See
         :class:`~celery.backend.base.BaseBackend`."""
         return self._get_backend()

@@ -398,7 +360,7 @@ class BaseApp(object):
     @cached_property
     def loader(self):
         """Current loader."""
-        from celery.loaders import get_loader_cls
+        from ..loaders import get_loader_cls
         return get_loader_cls(self.loader_cls)(app=self)

     @cached_property

+ 79 - 27
celery/app/defaults.py

@@ -1,5 +1,19 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.app.defaults
+    ~~~~~~~~~~~~~~~~~~~
+
+    Configuration introspection and defaults.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
 import sys

+from collections import deque
 from datetime import timedelta

 is_jython = sys.platform.startswith("java")
@@ -9,7 +23,11 @@ DEFAULT_POOL = "processes"
 if is_jython:
     DEFAULT_POOL = "threads"
 elif is_pypy:
-    DEFAULT_POOL = "solo"
+    if sys.pypy_version_info[0:3] < (1, 5, 0):
+        DEFAULT_POOL = "solo"
+    else:
+        DEFAULT_POOL = "processes"
+

 DEFAULT_PROCESS_LOG_FMT = """
     [%(asctime)s: %(levelname)s/%(processName)s] %(message)s
@@ -28,12 +46,17 @@ def str_to_bool(term, table={"false": False, "no": False, "0": False,


 class Option(object):
+    alt = None
+    deprecate_by = None
+    remove_by = None
     typemap = dict(string=str, int=int, float=float, any=lambda v: v,
                    bool=str_to_bool, dict=dict, tuple=tuple)

     def __init__(self, default=None, *args, **kwargs):
         self.default = default
         self.type = kwargs.get("type") or "string"
+        for attr, value in kwargs.iteritems():
+            setattr(self, attr, value)

     def to_python(self, value):
         return self.typemap[self.type](value)
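to_python() dispatches on the declared option type, so string values from config files or the environment are coerced before use. A trimmed, runnable rendition of the dispatch (str_to_bool mirrors the helper named in the hunk header above):

    def str_to_bool(term, table={"false": False, "no": False, "0": False,
                                 "true": True, "yes": True, "1": True}):
        try:
            return table[term.lower()]
        except KeyError:
            raise TypeError("Cannot coerce %r to type bool" % (term, ))

    typemap = {"string": str, "int": int, "bool": str_to_bool}

    assert typemap["int"]("10") == 10
    assert typemap["bool"]("no") is False
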
@@ -41,25 +64,31 @@

 NAMESPACES = {
     "BROKER": {
-        "HOST": Option("localhost"),
+        "URL": Option(None, type="string"),
+        "HOST": Option(None, type="string"),
         "PORT": Option(type="int"),
-        "USER": Option("guest"),
-        "PASSWORD": Option("guest"),
-        "VHOST": Option("/"),
-        "BACKEND": Option(),
+        "USER": Option(None, type="string"),
+        "PASSWORD": Option(None, type="string"),
+        "VHOST": Option(None, type="string"),
         "CONNECTION_TIMEOUT": Option(4, type="int"),
         "CONNECTION_RETRY": Option(True, type="bool"),
         "CONNECTION_MAX_RETRIES": Option(100, type="int"),
-        "POOL_LIMIT": Option(None, type="int"),
-        "INSIST": Option(False, type="bool"),
+        "POOL_LIMIT": Option(10, type="int"),
+        "INSIST": Option(False, type="bool",
+                         deprecate_by="2.4", remove_by="3.0"),
         "USE_SSL": Option(False, type="bool"),
+        "TRANSPORT": Option(None, type="string"),
         "TRANSPORT_OPTIONS": Option({}, type="dict"),
     },
     "CELERY": {
         "ACKS_LATE": Option(False, type="bool"),
         "ALWAYS_EAGER": Option(False, type="bool"),
-        "AMQP_TASK_RESULT_EXPIRES": Option(type="int"),
-        "AMQP_TASK_RESULT_CONNECTION_MAX": Option(1, type="int"),
+        "AMQP_TASK_RESULT_EXPIRES": Option(type="int",
+                deprecate_by="2.5", remove_by="3.0",
+                alt="CELERY_TASK_RESULT_EXPIRES"),
+        "AMQP_TASK_RESULT_CONNECTION_MAX": Option(1, type="int",
+            remove_by="2.5", alt="BROKER_POOL_LIMIT"),
+        "ANNOTATIONS": Option(type="any"),
         "BROADCAST_QUEUE": Option("celeryctl"),
         "BROADCAST_EXCHANGE": Option("celeryctl"),
         "BROADCAST_EXCHANGE_TYPE": Option("fanout"),
@@ -74,6 +103,7 @@ NAMESPACES = {
         "DEFAULT_EXCHANGE_TYPE": Option("direct"),
         "DEFAULT_DELIVERY_MODE": Option(2, type="string"),
         "EAGER_PROPAGATES_EXCEPTIONS": Option(False, type="bool"),
+        "ENABLE_UTC": Option(False, type="bool"),
         "EVENT_SERIALIZER": Option("json"),
         "IMPORTS": Option((), type="tuple"),
         "IGNORE_RESULT": Option(False, type="bool"),
@@ -84,7 +114,9 @@ NAMESPACES = {
         "REDIS_PORT": Option(None, type="int"),
         "REDIS_DB": Option(None, type="int"),
         "REDIS_PASSWORD": Option(None, type="string"),
+        "REDIS_MAX_CONNECTIONS": Option(None, type="int"),
         "RESULT_BACKEND": Option(None, type="string"),
+        "RESULT_DB_SHORT_LIVED_SESSIONS": Option(False, type="bool"),
         "RESULT_DBURI": Option(),
         "RESULT_ENGINE_OPTIONS": Option(None, type="dict"),
         "RESULT_EXCHANGE": Option("celeryresults"),
@@ -96,7 +128,8 @@ NAMESPACES = {
         "SEND_TASK_ERROR_EMAILS": Option(False, type="bool"),
         "SEND_TASK_SENT_EVENT": Option(False, type="bool"),
         "STORE_ERRORS_EVEN_IF_IGNORED": Option(False, type="bool"),
-        "TASK_ERROR_WHITELIST": Option((), type="tuple"),
+        "TASK_ERROR_WHITELIST": Option((), type="tuple",
+            deprecate_by="2.5", remove_by="3.0"),
         "TASK_PUBLISH_RETRY": Option(True, type="bool"),
         "TASK_PUBLISH_RETRY_POLICY": Option({
                 "max_retries": 100,
@@ -105,22 +138,27 @@ NAMESPACES = {
                 "interval_step": 0.2}, type="dict"),
         "TASK_RESULT_EXPIRES": Option(timedelta(days=1), type="int"),
         "TASK_SERIALIZER": Option("pickle"),
+        "TIMEZONE": Option(None, type="string"),
         "TRACK_STARTED": Option(False, type="bool"),
         "REDIRECT_STDOUTS": Option(True, type="bool"),
         "REDIRECT_STDOUTS_LEVEL": Option("WARNING"),
         "QUEUES": Option(None, type="dict"),
+        "SECURITY_KEY": Option(None, type="string"),
+        "SECURITY_CERTIFICATE": Option(None, type="string"),
+        "SECURITY_CERT_STORE": Option(None, type="string"),
     },
     "CELERYD": {
         "AUTOSCALER": Option("celery.worker.autoscale.Autoscaler"),
         "CONCURRENCY": Option(0, type="int"),
-        "ETA_SCHEDULER": Option(None, type="str"),
+        "ETA_SCHEDULER": Option(None, type="string"),
         "ETA_SCHEDULER_PRECISION": Option(1.0, type="float"),
         "HIJACK_ROOT_LOGGER": Option(True, type="bool"),
         "CONSUMER": Option("celery.worker.consumer.Consumer"),
         "LOG_FORMAT": Option(DEFAULT_PROCESS_LOG_FMT),
         "LOG_COLOR": Option(type="bool"),
-        "LOG_LEVEL": Option("WARN"),
-        "LOG_FILE": Option(),
+        "LOG_LEVEL": Option("WARN", deprecate_by="2.4", remove_by="3.0",
+                            alt="--loglevel argument"),
+        "LOG_FILE": Option(deprecate_by="2.4", remove_by="3.0"),
         "MEDIATOR": Option("celery.worker.mediator.Mediator"),
         "MAX_TASKS_PER_CHILD": Option(type="int"),
         "POOL": Option(DEFAULT_POOL),
@@ -136,12 +174,12 @@ NAMESPACES = {
         "SCHEDULER": Option("celery.beat.PersistentScheduler"),
         "SCHEDULE_FILENAME": Option("celerybeat-schedule"),
         "MAX_LOOP_INTERVAL": Option(5 * 60, type="int"),
-        "LOG_LEVEL": Option("INFO"),
-        "LOG_FILE": Option(),
+        "LOG_LEVEL": Option("INFO", deprecate_by="2.4", remove_by="3.0"),
+        "LOG_FILE": Option(deprecate_by="2.4", remove_by="3.0"),
     },
     "CELERYMON": {
-        "LOG_LEVEL": Option("INFO"),
-        "LOG_FILE": Option(),
+        "LOG_LEVEL": Option("INFO", deprecate_by="2.4", remove_by="3.0"),
+        "LOG_FILE": Option(deprecate_by="2.4", remove_by="3.0"),
         "LOG_FORMAT": Option(DEFAULT_LOG_FMT),
     },
     "EMAIL": {
@@ -150,6 +188,8 @@ NAMESPACES = {
         "HOST_USER": Option(None),
         "HOST_PASSWORD": Option(None),
         "TIMEOUT": Option(2, type="int"),
+        "USE_SSL": Option(False, type="bool"),
+        "USE_TLS": Option(False, type="bool"),
     },
     "SERVER_EMAIL": Option("celery@localhost"),
     "ADMINS": Option((), type="tuple"),
@@ -160,13 +200,25 @@ NAMESPACES = {
 }


-def _flatten(d, ns=""):
-    acc = []
-    for key, value in d.iteritems():
-        if isinstance(value, dict):
-            acc.extend(_flatten(value, ns=key + '_'))
-        else:
-            acc.append((ns + key, value.default))
-    return acc
+def flatten(d, ns=""):
+    stack = deque([(ns, d)])
+    while stack:
+        name, space = stack.popleft()
+        for key, value in space.iteritems():
+            if isinstance(value, dict):
+                stack.append((name + key + '_', value))
+            else:
+                yield name + key, value
+
+
+def find_deprecated_settings(source):
+    from celery.utils import warn_deprecated
+    for name, opt in flatten(NAMESPACES):
+        if (opt.deprecate_by or opt.remove_by) and getattr(source, name, None):
+            warn_deprecated(description="The %r setting" % (name, ),
+                            deprecation=opt.deprecate_by,
+                            removal=opt.remove_by,
+                            alternative=opt.alt)
+

-DEFAULTS = dict(_flatten(NAMESPACES))
+DEFAULTS = dict((key, value.default) for key, value in flatten(NAMESPACES))
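The new generator flattens the nested namespace tree iteratively instead of recursively, and find_deprecated_settings reuses it to walk every option. A self-contained re-run of the same idea on a toy tree (.items() here where the Python 2 original uses .iteritems()):

    from collections import deque

    def flatten(d, ns=""):
        stack = deque([(ns, d)])
        while stack:
            name, space = stack.popleft()
            for key, value in space.items():
                if isinstance(value, dict):
                    stack.append((name + key + "_", value))
                else:
                    yield name + key, value

    NAMESPACES = {"BROKER": {"HOST": "localhost", "PORT": 5672}}
    assert dict(flatten(NAMESPACES)) == {"BROKER_HOST": "localhost",
                                         "BROKER_PORT": 5672}
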

+ 57 - 36
celery/app/task/__init__.py

@@ -1,13 +1,27 @@
-# -*- coding: utf-8 -*-"
+# -*- coding: utf-8 -*-
+"""
+    celery.app.task
+    ~~~~~~~~~~~~~~~
+
+    Tasks Implementation.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+
+from __future__ import absolute_import
+
 import sys
 import threading

-from celery.datastructures import ExceptionInfo
-from celery.exceptions import MaxRetriesExceededError, RetryTaskError
-from celery.execute.trace import TaskTrace
-from celery.registry import tasks, _unpickle_task
-from celery.result import EagerResult
-from celery.utils import mattrgetter, gen_unique_id, fun_takes_kwargs
+from ...datastructures import ExceptionInfo
+from ...exceptions import MaxRetriesExceededError, RetryTaskError
+from ...execute.trace import TaskTrace
+from ...registry import tasks, _unpickle_task
+from ...result import EagerResult
+from ...utils import fun_takes_kwargs, mattrgetter, uuid
+from ...utils.mail import ErrorMail

 extract_exec_options = mattrgetter("queue", "routing_key",
                                    "exchange", "immediate",
@@ -20,6 +34,7 @@ class Context(threading.local):
     # Default context
     logfile = None
     loglevel = None
+    hostname = None
     id = None
     args = None
     kwargs = None
@@ -28,6 +43,7 @@ class Context(threading.local):
     delivery_info = None
     taskset = None
     chord = None
+    called_directly = True

     def update(self, d, **kwargs):
         self.__dict__.update(d, **kwargs)
@@ -36,18 +52,19 @@ class Context(threading.local):
         self.__dict__.clear()

     def get(self, key, default=None):
-        if not hasattr(self, key):
+        try:
+            return getattr(self, key)
+        except AttributeError:
             return default
-        return getattr(self, key)

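Context.get now does one EAFP attribute lookup instead of hasattr() followed by getattr(). Standalone:

    class Context(object):
        loglevel = None

        def get(self, key, default=None):
            try:
                return getattr(self, key)
            except AttributeError:
                return default

    ctx = Context()
    assert ctx.get("loglevel") is None        # found (class attribute)
    assert ctx.get("missing", 42) == 42       # absent, default returned
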
 class TaskType(type):
-    """Metaclass for tasks.
+    """Meta class for tasks.

     Automatically registers the task in the task registry, except
     if the `abstract` attribute is set.

-    If no `name` attribute is provided, the name is automatically
+    If no `name` attribute is provided, then no name is automatically
     set to the name of the module it was defined in, and the class name.

     """
@@ -65,7 +82,7 @@ class TaskType(type):
         if not attrs.get("name"):
             try:
                 module_name = sys.modules[task_module].__name__
-            except KeyError:
+            except KeyError:  # pragma: no cover
                 # Fix for manage.py shell_plus (Issue #366).
                 module_name = task_module
             attrs["name"] = '.'.join([module_name, name])
@@ -82,6 +99,9 @@ class TaskType(type):
                 task_name = task_cls.name = '.'.join([task_cls.app.main, name])
             tasks.register(task_cls)
         task = tasks[task_name].__class__
+
+        # decorate with annotations from config.
+        task.app.annotate_task(task)
         return task

     def __repr__(cls):
@@ -98,6 +118,7 @@ class BaseTask(object):
     """
     __metaclass__ = TaskType

+    ErrorMail = ErrorMail
     MaxRetriesExceededError = MaxRetriesExceededError

     #: The application instance associated with this task class.
@@ -215,7 +236,7 @@ class BaseTask(object):
     #: worker crashes mid execution (which may be acceptable for some
     #: applications).
     #:
-    #: The application default can be overriden with the
+    #: The application default can be overridden with the
     #: :setting:`CELERY_ACKS_LATE` setting.
     acks_late = False

@@ -239,15 +260,10 @@ class BaseTask(object):
     def get_logger(self, loglevel=None, logfile=None, propagate=False,
             **kwargs):
         """Get task-aware logger object."""
-        if loglevel is None:
-            loglevel = self.request.loglevel
-        if logfile is None:
-            logfile = self.request.logfile
-        return self.app.log.setup_task_logger(loglevel=loglevel,
-                                              logfile=logfile,
-                                              propagate=propagate,
-                                              task_name=self.name,
-                                              task_id=self.request.id)
+        return self.app.log.setup_task_logger(
+            loglevel=self.request.loglevel if loglevel is None else loglevel,
+            logfile=self.request.logfile if logfile is None else logfile,
+            propagate=propagate, task_name=self.name, task_id=self.request.id)

     @classmethod
     def establish_connection(self, connect_timeout=None):
@@ -277,8 +293,7 @@ class BaseTask(object):
                 ...     # ... do something with publisher

         """
-        if exchange is None:
-            exchange = self.exchange
+        exchange = self.exchange if exchange is None else exchange
         if exchange_type is None:
             exchange_type = self.exchange_type
         connection = connection or self.establish_connection(connect_timeout)
@@ -340,7 +355,7 @@ class BaseTask(object):

         :keyword countdown: Number of seconds into the future that the
                             task should execute. Defaults to immediate
-                            delivery (do not confuse with the
+                            execution (do not confuse with the
                             `immediate` flag, as they are unrelated).

         :keyword eta: A :class:`~datetime.datetime` object describing
@@ -379,7 +394,7 @@ class BaseTask(object):
         :keyword exchange: The named exchange to send the task to.
                            Defaults to the :attr:`exchange` attribute.

-        :keyword exchange_type: The exchange type to initalize the exchange
+        :keyword exchange_type: The exchange type to initialize the exchange
                                 if not already declared.  Defaults to the
                                 :attr:`exchange_type` attribute.

@@ -495,14 +510,16 @@ class BaseTask(object):

         """
         request = self.request
-        if max_retries is None:
-            max_retries = self.max_retries
-        if args is None:
-            args = request.args
-        if kwargs is None:
-            kwargs = request.kwargs
+        max_retries = self.max_retries if max_retries is None else max_retries
+        args = request.args if args is None else args
+        kwargs = request.kwargs if kwargs is None else kwargs
         delivery_info = request.delivery_info

+        # Not in worker or emulated by (apply/always_eager),
+        # so just raise the original exception.
+        if request.called_directly:
+            raise exc or RetryTaskError("Task can be retried", None)
+
         if delivery_info:
             options.setdefault("exchange", delivery_info.get("exchange"))
             options.setdefault("routing_key", delivery_info.get("routing_key"))
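The called_directly guard means retry() no longer assumes it is running inside a worker: with no message to re-queue, it degenerates to raising. A behavioural sketch with stand-in names:

    class RetryTaskError(Exception):
        pass

    def retry(request, exc=None):
        # Outside a worker (or under apply/ALWAYS_EAGER) there is
        # nothing to re-publish, so raise instead.
        if request.called_directly:
            raise exc or RetryTaskError("Task can be retried", None)
        # ...in a worker the task message would be re-sent here.
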
@@ -533,8 +550,7 @@

     @classmethod
     def apply(self, args=None, kwargs=None, **options):
-        """Execute this task locally, by blocking until the task
-        returns.
+        """Execute this task locally, by blocking until the task returns.

         :param args: positional arguments passed on to the task.
         :param kwargs: keyword arguments passed on to the task.
@@ -547,7 +563,7 @@
         """
         args = args or []
         kwargs = kwargs or {}
-        task_id = options.get("task_id") or gen_unique_id()
+        task_id = options.get("task_id") or uuid()
         retries = options.get("retries", 0)
         throw = self.app.either("CELERY_EAGER_PROPAGATES_EXCEPTIONS",
                                 options.pop("throw", None))
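A hedged usage sketch of apply() as documented here (celery 2.x API; execution is eager, so no broker is needed, but celery must be importable):

    from celery.task import task

    @task
    def add(x, y):
        return x + y

    res = add.apply(args=(2, 2))    # blocks, runs in the current process
    assert res.get() == 4
    assert res.successful()
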
@@ -661,6 +677,11 @@
         """
         pass

+    def send_error_email(self, context, exc, **kwargs):
+        if self.send_error_emails and not self.disable_error_emails:
+            sender = self.ErrorMail(self, **kwargs)
+            sender.send(context, exc)
+
     def on_success(self, retval, task_id, args, kwargs):
         """Success handler.

@@ -698,7 +719,7 @@
         """Returns :class:`~celery.task.sets.subtask` object for
         this task, wrapping arguments and execution options
         for a single task invocation."""
-        from celery.task.sets import subtask
+        from ...task.sets import subtask
         return subtask(cls, *args, **kwargs)

     @property

+ 12 - 10
celery/apps/beat.py

@@ -1,14 +1,16 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
 import atexit
 import socket
 import sys
 import traceback

-from celery import __version__
-from celery import beat
-from celery import platforms
-from celery.app import app_or_default
-from celery.utils import get_full_cls_name, LOG_LEVELS
-from celery.utils.timeutils import humanize_seconds
+from .. import __version__, platforms
+from .. import beat
+from ..app import app_or_default
+from ..utils import get_full_cls_name, LOG_LEVELS
+from ..utils.timeutils import humanize_seconds

 STARTUP_INFO_FMT = """
 Configuration ->
@@ -80,15 +82,15 @@ class Beat(object):
                   c.blue("        _\n"),
                   c.reset(self.startup_info(beat)))))
         if self.socket_timeout:
-            logger.debug("Setting default socket timeout to %r" % (
-                self.socket_timeout))
+            logger.debug("Setting default socket timeout to %r",
+                         self.socket_timeout)
             socket.setdefaulttimeout(self.socket_timeout)
         try:
             self.install_sync_handler(beat)
             beat.start()
         except Exception, exc:
-            logger.critical("celerybeat raised exception %s: %r\n%s" % (
-                            exc.__class__, exc, traceback.format_exc()),
+            logger.critical("celerybeat raised exception %s: %r\n%s",
+                            exc.__class__, exc, traceback.format_exc(),
                             exc_info=sys.exc_info())

     def init_loader(self):
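Both logging calls now pass format arguments instead of pre-formatting with %, so the message string is only built if the record is actually emitted. The stdlib behaviour this relies on:

    import logging

    logger = logging.getLogger("celery.beat.example")
    # %r is substituted inside the logging machinery, and only when
    # the DEBUG level is enabled for this logger.
    logger.debug("Setting default socket timeout to %r", 3.0)
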

+ 51 - 47
celery/apps/worker.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import

 import atexit
@@ -11,13 +12,18 @@ import socket
 import sys
 import warnings

-from celery import __version__
-from celery import platforms
-from celery import signals
-from celery.app import app_or_default
-from celery.exceptions import ImproperlyConfigured, SystemTerminate
-from celery.utils import get_full_cls_name, LOG_LEVELS, cry
-from celery.worker import WorkController
+from .. import __version__, platforms, signals
+from ..app import app_or_default
+from ..exceptions import ImproperlyConfigured, SystemTerminate
+from ..utils import get_full_cls_name, isatty, LOG_LEVELS, cry
+from ..worker import WorkController
+
+try:
+    from greenlet import GreenletExit
+    IGNORE_ERRORS = (GreenletExit, )
+except ImportError:
+    IGNORE_ERRORS = ()
+

 BANNER = """
  -------------- celery@%(hostname)s v%(version)s
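The optional-dependency pattern a few lines up degrades gracefully: without greenlet installed, IGNORE_ERRORS is the empty tuple, and an except clause over an empty tuple matches nothing. Standalone demonstration:

    try:
        from greenlet import GreenletExit
        IGNORE_ERRORS = (GreenletExit, )
    except ImportError:
        IGNORE_ERRORS = ()

    try:
        raise KeyError("boom")
    except IGNORE_ERRORS:
        pass                          # only ever taken for GreenletExit
    except KeyError:
        print("KeyError still propagates past the empty tuple")
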
@@ -39,6 +45,14 @@ EXTRA_INFO_FMT = """
 %(tasks)s
 """

+UNKNOWN_QUEUE_ERROR = """\
+Trying to select queue subset of %r, but queue %s is not
+defined in the CELERY_QUEUES setting.
+
+If you want to automatically declare unknown queues you can
+enable the CELERY_CREATE_MISSING_QUEUES setting.
+"""
+

 def cpu_count():
     if multiprocessing is not None:
@@ -49,6 +63,11 @@ def cpu_count():
     return 2


+def get_process_name():
+    if multiprocessing is not None:
+        return multiprocessing.current_process().name
+
+
 class Worker(object):
     WorkController = WorkController

@@ -95,7 +114,7 @@ class Worker(object):
         if autoscale:
             max_c, _, min_c = autoscale.partition(",")
             self.autoscale = [int(max_c), min_c and int(min_c) or 0]
-        self._isatty = sys.stdout.isatty()
+        self._isatty = isatty(sys.stdout)

         self.colored = app.log.colored(self.logfile)

@@ -119,9 +138,9 @@ class Worker(object):
         self.worker_init()
         self.redirect_stdouts_to_logger()

-        if getattr(os, "geteuid", None) and os.geteuid() == 0:
+        if getattr(os, "getuid", None) and os.getuid() == 0:
             warnings.warn(
-                "Running celeryd with superuser privileges is not encouraged!")
+                "Running celeryd with superuser privileges is discouraged!")

         if self.discard:
             self.purge_messages()
@@ -132,25 +151,21 @@ class Worker(object):
               str(self.colored.reset(self.extra_info())))
         self.set_process_status("-active-")

-        self.run_worker()
+        try:
+            self.run_worker()
+        except IGNORE_ERRORS:
+            pass

     def on_consumer_ready(self, consumer):
         signals.worker_ready.send(sender=consumer)
         print("celery@%s has started." % self.hostname)

     def init_queues(self):
-        if self.use_queues:
-            create_missing = self.app.conf.CELERY_CREATE_MISSING_QUEUES
-            try:
-                self.app.amqp.queues.select_subset(self.use_queues,
-                                                   create_missing)
-            except KeyError, exc:
-                raise ImproperlyConfigured(
-                    "Trying to select queue subset of %r, but queue %s"
-                    "is not defined in CELERY_QUEUES. If you want to "
-                    "automatically declare unknown queues you have to "
-                    "enable CELERY_CREATE_MISSING_QUEUES" % (
-                        self.use_queues, exc))
+        try:
+            self.app.select_queues(self.use_queues)
+        except KeyError, exc:
+            raise ImproperlyConfigured(
+                        UNKNOWN_QUEUE_ERROR % (self.use_queues, exc))

     def init_loader(self):
         self.loader = self.app.loader
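init_queues now delegates to the BaseApp.select_queues method added in celery/app/base.py above, with the error text moved to the module-level UNKNOWN_QUEUE_ERROR template. A pure-Python sketch of the subset semantics (the real implementation lives on the app's queue mapping):

    def select_subset(queues, wanted, create_missing=False):
        if create_missing:
            return dict((name, queues.get(name, {})) for name in wanted)
        # unknown names raise KeyError, which the caller turns into
        # an ImproperlyConfigured error via UNKNOWN_QUEUE_ERROR
        return dict((name, queues[name]) for name in wanted)

    queues = {"celery": {}, "images": {}}
    assert list(select_subset(queues, ["images"])) == ["images"]
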
@@ -178,7 +193,7 @@ class Worker(object):
         self.loader.init_worker()

     def tasklist(self, include_builtins=True):
-        from celery.registry import tasks
+        from ..registry import tasks
         tasklist = tasks.keys()
         if not include_builtins:
             tasklist = filter(lambda s: not s.startswith("celery."),
@@ -274,16 +289,12 @@ class Worker(object):
 def install_worker_int_handler(worker):

     def _stop(signum, frame):
-        process_name = None
-        if multiprocessing:
-            process_name = multiprocessing.current_process().name
+        process_name = get_process_name()
         if not process_name or process_name == "MainProcess":
-            worker.logger.warn(
-                "celeryd: Hitting Ctrl+C again will terminate "
-                "all running tasks!")
+            print("celeryd: Hitting Ctrl+C again will terminate "
+                  "all running tasks!")
             install_worker_int_again_handler(worker)
-            worker.logger.warn("celeryd: Warm shutdown (%s)" % (
-                process_name))
+            print("celeryd: Warm shutdown (%s)" % (process_name, ))
             worker.stop(in_sighandler=True)
         raise SystemExit()

@@ -293,12 +304,9 @@ def install_worker_int_handler(worker):
 def install_worker_int_again_handler(worker):

     def _stop(signum, frame):
-        process_name = None
-        if multiprocessing:
-            process_name = multiprocessing.current_process().name
+        process_name = get_process_name()
         if not process_name or process_name == "MainProcess":
-            worker.logger.warn("celeryd: Cold shutdown (%s)" % (
-                process_name))
+            print("celeryd: Cold shutdown (%s)" % (process_name, ))
             worker.terminate(in_sighandler=True)
         raise SystemTerminate()

@@ -308,12 +316,9 @@ def install_worker_int_again_handler(worker):
 def install_worker_term_handler(worker):

     def _stop(signum, frame):
-        process_name = None
-        if multiprocessing:
-            process_name = multiprocessing.current_process().name
+        process_name = get_process_name()
         if not process_name or process_name == "MainProcess":
-            worker.logger.warn("celeryd: Warm shutdown (%s)" % (
-                process_name))
+            print("celeryd: Warm shutdown (%s)" % (process_name, ))
             worker.stop(in_sighandler=True)
         raise SystemExit()

@@ -324,8 +329,7 @@ def install_worker_restart_handler(worker):

     def restart_worker_sig_handler(signum, frame):
         """Signal handler restarting the current python program."""
-        worker.logger.warn("Restarting celeryd (%s)" % (
-            " ".join(sys.argv)))
+        print("Restarting celeryd (%s)" % (" ".join(sys.argv), ))
         worker.stop(in_sighandler=True)
         os.execv(sys.executable, [sys.executable] + sys.argv)

@@ -345,14 +349,14 @@ def install_cry_handler(logger):
         platforms.signals["SIGUSR1"] = cry_handler


-def install_rdb_handler():  # pragma: no cover
+def install_rdb_handler(envvar="CELERY_RDBSIG"):  # pragma: no cover

     def rdb_handler(signum, frame):
         """Signal handler setting a rdb breakpoint at the current frame."""
-        from celery.contrib import rdb
+        from ..contrib import rdb
         rdb.set_trace(frame)

-    if os.environ.get("CELERY_RDBSIG"):
+    if os.environ.get(envvar):
         platforms.signals["SIGUSR2"] = rdb_handler


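All of the installers above share one shape: build a closure over the worker, then register it for a signal. The same pattern with the stdlib signal module directly (celery routes registration through platforms.signals instead):

    import signal

    def install_term_handler(worker):

        def _stop(signum, frame):
            print("celeryd: Warm shutdown")
            worker.stop(in_sighandler=True)
            raise SystemExit()

        signal.signal(signal.SIGTERM, _stop)
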

+ 15 - 15
celery/backends/__init__.py

@@ -1,6 +1,10 @@
-from celery import current_app
-from celery.local import LocalProxy
-from celery.utils import get_cls_by_name
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from .. import current_app
+from ..local import Proxy
+from ..utils import get_cls_by_name
+from ..utils.functional import memoize

 BACKEND_ALIASES = {
     "amqp": "celery.backends.amqp.AMQPBackend",
@@ -13,23 +17,19 @@ BACKEND_ALIASES = {
     "disabled": "celery.backends.base.DisabledBackend",
 }

-_backend_cache = {}
-

+@memoize(100)
 def get_backend_cls(backend=None, loader=None):
     """Get backend class by name/alias"""
     backend = backend or "disabled"
     loader = loader or current_app.loader
-    if backend not in _backend_cache:
-        aliases = dict(BACKEND_ALIASES, **loader.override_backends)
-        try:
-            _backend_cache[backend] = get_cls_by_name(backend, aliases)
-        except ValueError, exc:
-            raise ValueError("Unknown result backend: %r.  "
-                             "Did you spell it correctly?  (%s)" % (backend,
-                                                                    exc))
-    return _backend_cache[backend]
+    aliases = dict(BACKEND_ALIASES, **loader.override_backends)
+    try:
+        return get_cls_by_name(backend, aliases)
+    except ValueError, exc:
+        raise ValueError("Unknown result backend: %r.  "
+                         "Did you spell it correctly?  (%s)" % (backend, exc))


 # deprecate this.
-default_backend = LocalProxy(lambda: current_app.backend)
+default_backend = Proxy(lambda: current_app.backend)
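@memoize(100) replaces the hand-rolled _backend_cache dict, so repeated lookups of the same backend name resolve the class once. A hand-rolled stand-in for the decorator (celery's version also caps the cache at the given size; this sketch does not):

    def memoize(fun):
        cache = {}

        def _inner(*args):
            try:
                return cache[args]
            except KeyError:
                cache[args] = fun(*args)
                return cache[args]
        return _inner

    @memoize
    def resolve(name):
        print("resolving %s" % (name, ))   # printed once per name
        return name.upper()

    resolve("redis"); resolve("redis")     # second call served from cache
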

+ 39 - 21
celery/backends/amqp.py

@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import, with_statement
+from __future__ import absolute_import
+from __future__ import with_statement

 import socket
 import threading
@@ -10,9 +11,10 @@ from itertools import count
 from kombu.entity import Exchange, Queue
 from kombu.messaging import Consumer, Producer

-from celery import states
-from celery.backends.base import BaseDictBackend
-from celery.exceptions import TimeoutError
+from .. import states
+from ..exceptions import TimeoutError
+
+from .base import BaseDictBackend


 class BacklogLimitExceeded(Exception):
@@ -35,9 +37,11 @@ class AMQPBackend(BaseDictBackend):

     BacklogLimitExceeded = BacklogLimitExceeded

+    supports_native_join = True
+
     def __init__(self, connection=None, exchange=None, exchange_type=None,
             persistent=None, serializer=None, auto_delete=True,
-            expires=None, connection_max=None, **kwargs):
+            **kwargs):
         super(AMQPBackend, self).__init__(**kwargs)
         conf = self.app.conf
         self._connection = connection
@@ -51,17 +55,23 @@ class AMQPBackend(BaseDictBackend):
                                       type=exchange_type,
                                       delivery_mode=delivery_mode,
                                       durable=self.persistent,
-                                      auto_delete=auto_delete)
+                                      auto_delete=False)
         self.serializer = serializer or conf.CELERY_RESULT_SERIALIZER
         self.auto_delete = auto_delete
-        self.expires = (conf.CELERY_AMQP_TASK_RESULT_EXPIRES if expires is None
-                                                             else expires)
-        if self.expires is not None:
-            self.expires = self.prepare_expires(self.expires)
-            # x-expires requires RabbitMQ 2.1.0 or higher.
-            self.queue_arguments["x-expires"] = self.expires * 1000.0
-        self.connection_max = (connection_max or
-                               conf.CELERY_AMQP_TASK_RESULT_CONNECTION_MAX)
+
+        # AMQP_TASK_RESULT_EXPIRES setting is deprecated and will be
+        # removed in version 3.0.
+        dexpires = conf.CELERY_AMQP_TASK_RESULT_EXPIRES
+
+        self.expires = None
+        if "expires" in kwargs:
+            if kwargs["expires"] is not None:
+                self.expires = self.prepare_expires(kwargs["expires"])
+        else:
+            self.expires = self.prepare_expires(dexpires)
+
+        if self.expires:
+            self.queue_arguments["x-expires"] = int(self.expires * 1000)
         self.mutex = threading.Lock()

     def _create_binding(self, task_id):
@@ -84,12 +94,10 @@ class AMQPBackend(BaseDictBackend):

     def _publish_result(self, connection, task_id, meta):
         # cache single channel
-        if hasattr(connection, "_result_producer_chan") and \
-                connection._result_producer_chan is not None and \
-                connection._result_producer_chan.connection is not None:
-            channel = connection._result_producer_chan
-        else:
-            channel = connection._result_producer_chan = connection.channel()
+        if connection._default_channel is not None and \
+                connection._default_channel.connection is None:
+            connection.maybe_close_channel(connection._default_channel)
+        channel = connection.default_channel

         self._create_producer(task_id, channel).publish(meta)

@@ -104,7 +112,6 @@
             with self.app.pool.acquire(block=True) as conn:

                 def errback(error, delay):
-                    conn._result_producer_chan = None
                     print("Couldn't send result for %r: %r. Retry in %rs." % (
                             task_id, error, delay))

@@ -233,3 +240,14 @@
     def delete_taskset(self, taskset_id):
         raise NotImplementedError(
                 "delete_taskset is not supported by this backend.")
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(connection=self._connection,
+                 exchange=self.exchange.name,
+                 exchange_type=self.exchange.type,
+                 persistent=self.persistent,
+                 serializer=self.serializer,
+                 auto_delete=self.auto_delete,
+                 expires=self.expires))
+        return super(AMQPBackend, self).__reduce__(args, kwargs)
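
__reduce__ here feeds the constructor arguments through unpickle_backend (defined in celery/backends/base.py below), so a backend instance can cross process boundaries by re-running its own constructor. A self-contained miniature of the protocol:

    import pickle

    def unpickle_backend(cls, args, kwargs):
        return cls(*args, **kwargs)

    class Backend(object):
        def __init__(self, url="amqp://"):
            self.url = url

        def __reduce__(self, args=(), kwargs={}):
            kwargs.update(url=self.url)
            return (unpickle_backend, (self.__class__, args, kwargs))

    copy = pickle.loads(pickle.dumps(Backend("redis://")))
    assert copy.url == "redis://"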

+ 91 - 35
celery/backends/base.py

@@ -1,14 +1,30 @@
+# -*- coding: utf-8 -*-
 """celery.backends.base"""
 """celery.backends.base"""
+from __future__ import absolute_import
+
 import time
 import time
+import sys
 
 
 from datetime import timedelta
 from datetime import timedelta
 
 
-from celery import states
-from celery.exceptions import TimeoutError, TaskRevokedError
-from celery.utils import timeutils
-from celery.utils.serialization import pickle, get_pickled_exception
-from celery.utils.serialization import get_pickleable_exception
-from celery.datastructures import LocalCache
+from kombu import serialization
+
+from .. import states
+from ..datastructures import LRUCache
+from ..exceptions import TimeoutError, TaskRevokedError
+from ..utils import timeutils
+from ..utils.encoding import ensure_bytes, from_utf8
+from ..utils.serialization import (get_pickled_exception,
+                                   get_pickleable_exception,
+                                   create_exception_cls)
+
+EXCEPTION_ABLE_CODECS = frozenset(["pickle", "yaml"])
+is_py3k = sys.version_info >= (3, 0)
+
+
+def unpickle_backend(cls, args, kwargs):
+    """Returns an unpickled backend."""
+    return cls(*args, **kwargs)
 
 
 
 
 class BaseBackend(object):
 class BaseBackend(object):
@@ -19,9 +35,32 @@ class BaseBackend(object):
 
 
     TimeoutError = TimeoutError
     TimeoutError = TimeoutError
 
 
+    #: Time to sleep between polling each individual item
+    #: in `ResultSet.iterate`. as opposed to the `interval`
+    #: argument which is for each pass.
+    subpolling_interval = None
+
+    #: If true the backend must implement :meth:`get_many`.
+    supports_native_join = False
+
     def __init__(self, *args, **kwargs):
     def __init__(self, *args, **kwargs):
-        from celery.app import app_or_default
+        from ..app import app_or_default
         self.app = app_or_default(kwargs.get("app"))
         self.app = app_or_default(kwargs.get("app"))
+        self.serializer = kwargs.get("serializer",
+                                     self.app.conf.CELERY_RESULT_SERIALIZER)
+        (self.content_type,
+         self.content_encoding,
+         self.encoder) = serialization.registry._encoders[self.serializer]
+
+    def encode(self, data):
+        _, _, payload = serialization.encode(data, serializer=self.serializer)
+        return payload
+
+    def decode(self, payload):
+        payload = is_py3k and payload or str(payload)
+        return serialization.decode(payload,
+                                    content_type=self.content_type,
+                                    content_encoding=self.content_encoding)
 
 
     def prepare_expires(self, value, type=None):
     def prepare_expires(self, value, type=None):
         if value is None:
         if value is None:
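encode()/decode() now delegate to kombu's serialization registry instead of hard-coding pickle. Round-tripping a result payload through the same registry (kombu 1.x-era names; later kombu releases renamed these to dumps()/loads()):

    from kombu import serialization

    content_type, encoding, payload = serialization.encode(
            {"status": "SUCCESS", "result": 4}, serializer="json")
    data = serialization.decode(payload, content_type=content_type,
                                content_encoding=encoding)
    assert data["result"] == 4
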
@@ -33,7 +72,7 @@ class BaseBackend(object):
         return value

     def encode_result(self, result, status):
-        if status in self.EXCEPTION_STATES:
+        if status in self.EXCEPTION_STATES and isinstance(result, Exception):
             return self.prepare_exception(result)
         else:
             return self.prepare_value(result)
@@ -68,11 +107,16 @@ class BaseBackend(object):

     def prepare_exception(self, exc):
         """Prepare exception for serialization."""
-        return get_pickleable_exception(exc)
+        if self.serializer in EXCEPTION_ABLE_CODECS:
+            return get_pickleable_exception(exc)
+        return {"exc_type": type(exc).__name__, "exc_message": str(exc)}

     def exception_to_python(self, exc):
         """Convert serialized exception to Python exception."""
-        return get_pickled_exception(exc)
+        if self.serializer in EXCEPTION_ABLE_CODECS:
+            return get_pickled_exception(exc)
+        return create_exception_cls(from_utf8(exc["exc_type"]),
+                                    sys.modules[__name__])

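For serializers that cannot carry exception objects (anything outside pickle and yaml), a raised exception is flattened to a plain dict, e.g. under a json result serializer:

    exc = KeyError("missing")
    stored = {"exc_type": type(exc).__name__, "exc_message": str(exc)}
    assert stored == {"exc_type": "KeyError", "exc_message": "'missing'"}
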
     def prepare_value(self, result):
         """Prepare value for storage."""
@@ -159,23 +203,24 @@ class BaseBackend(object):
         raise NotImplementedError(
                 "reload_taskset_result is not supported by this backend.")

-    def on_chord_part_return(self, task):
+    def on_chord_part_return(self, task, propagate=False):
         pass

-    def on_chord_apply(self, setid, body, **kwargs):
-        from celery.registry import tasks
+    def on_chord_apply(self, setid, body, result=None, **kwargs):
+        from ..registry import tasks
+        kwargs["result"] = [r.task_id for r in result]
         tasks["celery.chord_unlock"].apply_async((setid, body, ), kwargs,
                                                  countdown=1)

-    def __reduce__(self):
-        return (self.__class__, ())
+    def __reduce__(self, args=(), kwargs={}):
+        return (unpickle_backend, (self.__class__, args, kwargs))


 class BaseDictBackend(BaseBackend):

     def __init__(self, *args, **kwargs):
         super(BaseDictBackend, self).__init__(*args, **kwargs)
-        self._cache = LocalCache(limit=kwargs.get("max_cached_results") or
+        self._cache = LRUCache(limit=kwargs.get("max_cached_results") or
                                  self.app.conf.CELERY_MAX_CACHED_RESULTS)

     def store_result(self, task_id, result, status, traceback=None, **kwargs):
@@ -208,8 +253,11 @@ class BaseDictBackend(BaseBackend):
             return meta["result"]

     def get_task_meta(self, task_id, cache=True):
-        if cache and task_id in self._cache:
-            return self._cache[task_id]
+        if cache:
+            try:
+                return self._cache[task_id]
+            except KeyError:
+                pass

         meta = self._get_task_meta_for(task_id)
         if cache and meta.get("status") == states.SUCCESS:
@@ -224,8 +272,11 @@ class BaseDictBackend(BaseBackend):
                                                         cache=False)

     def get_taskset_meta(self, taskset_id, cache=True):
-        if cache and taskset_id in self._cache:
-            return self._cache[taskset_id]
+        if cache:
+            try:
+                return self._cache[taskset_id]
+            except KeyError:
+                pass

         meta = self._restore_taskset(taskset_id)
         if cache and meta is not None:
@@ -250,6 +301,7 @@ class BaseDictBackend(BaseBackend):
 class KeyValueStoreBackend(BaseDictBackend):
     task_keyprefix = "celery-task-meta-"
     taskset_keyprefix = "celery-taskset-meta-"
+    chord_keyprefix = "chord-unlock-"

     def get(self, key):
         raise NotImplementedError("Must implement the get method.")
@@ -265,11 +317,15 @@ class KeyValueStoreBackend(BaseDictBackend):

     def get_key_for_task(self, task_id):
         """Get the cache key for a task by id."""
-        return self.task_keyprefix + task_id
+        return ensure_bytes(self.task_keyprefix) + ensure_bytes(task_id)

     def get_key_for_taskset(self, taskset_id):
-        """Get the cache key for a task by id."""
-        return self.taskset_keyprefix + taskset_id
+        """Get the cache key for a taskset by id."""
+        return ensure_bytes(self.taskset_keyprefix) + ensure_bytes(taskset_id)
+
+    def get_key_for_chord(self, taskset_id):
+        """Get the cache key for the chord waiting on taskset with given id."""
+        return ensure_bytes(self.chord_keyprefix) + ensure_bytes(taskset_id)

     def _strip_prefix(self, key):
         for prefix in self.task_keyprefix, self.taskset_keyprefix:
@@ -280,12 +336,12 @@ class KeyValueStoreBackend(BaseDictBackend):
     def _mget_to_results(self, values, keys):
         if hasattr(values, "items"):
             # client returns dict so mapping preserved.
-            return dict((self._strip_prefix(k), pickle.loads(str(v)))
+            return dict((self._strip_prefix(k), self.decode(v))
                             for k, v in values.iteritems()
                                 if v is not None)
         else:
             # client returns list so need to recreate mapping.
-            return dict((keys[i], pickle.loads(str(value)))
+            return dict((keys[i], self.decode(value))
                             for i, value in enumerate(values)
                                 if value is not None)

@@ -303,6 +359,7 @@ class KeyValueStoreBackend(BaseDictBackend):
                     cached_ids.add(task_id)

         ids ^= cached_ids
+        iterations = 0
         while ids:
             keys = list(ids)
             r = self._mget_to_results(self.mget([self.get_key_for_task(k)
@@ -311,19 +368,22 @@ class KeyValueStoreBackend(BaseDictBackend):
             ids ^= set(r.keys())
             for key, value in r.iteritems():
                 yield key, value
+            if timeout and iterations * interval >= timeout:
+                raise TimeoutError("Operation timed out (%s)" % (timeout, ))
             time.sleep(interval)  # don't busy loop.
+            iterations += 1

     def _forget(self, task_id):
         self.delete(self.get_key_for_task(task_id))

     def _store_result(self, task_id, result, status, traceback=None):
         meta = {"status": status, "result": result, "traceback": traceback}
-        self.set(self.get_key_for_task(task_id), pickle.dumps(meta))
+        self.set(self.get_key_for_task(task_id), self.encode(meta))
         return result

     def _save_taskset(self, taskset_id, result):
         self.set(self.get_key_for_taskset(taskset_id),
-                 pickle.dumps({"result": result}))
+                 self.encode({"result": result}))
         return result

     def _delete_taskset(self, taskset_id):
@@ -334,17 +394,17 @@ class KeyValueStoreBackend(BaseDictBackend):
         meta = self.get(self.get_key_for_task(task_id))
         if not meta:
             return {"status": states.PENDING, "result": None}
-        return pickle.loads(str(meta))
+        return self.decode(meta)

     def _restore_taskset(self, taskset_id):
         """Get task metadata for a task by id."""
         meta = self.get(self.get_key_for_taskset(taskset_id))
         if meta:
-            meta = pickle.loads(str(meta))
-            return meta
+            return self.decode(meta)


 class DisabledBackend(BaseBackend):
+    _cache = {}   # need this attribute to reset cache in tests.

     def store_result(self, *args, **kwargs):
         pass
@@ -352,8 +412,4 @@ class DisabledBackend(BaseBackend):
     def _is_disabled(self, *args, **kwargs):
         raise NotImplementedError("No result backend configured.  "
                 "Please see the documentation for more information.")
-
-    wait_for = _is_disabled
-    get_status = _is_disabled
-    get_result = _is_disabled
-    get_traceback = _is_disabled
+    wait_for = get_status = get_result = get_traceback = _is_disabled
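Note on the exception changes above: with a serializer that cannot round-trip arbitrary exception objects (anything outside EXCEPTION_ABLE_CODECS, e.g. json), a failure is stored as a plain {"exc_type", "exc_message"} dict and `exception_to_python` rebuilds a stand-in exception class by name. A minimal sketch of that round trip (the `backend` object and its json configuration are assumptions, not part of the diff):

    # Hypothetical json-configured backend; `backend` is assumed to exist.
    try:
        raise KeyError("missing")
    except KeyError, exc:
        data = backend.prepare_exception(exc)
        # data == {"exc_type": "KeyError", "exc_message": "missing"}

    restored = backend.exception_to_python(data)
    # As written above, the non-pickle branch returns the re-created
    # exception *class* (built by create_exception_cls), not an instance;
    # the stored "exc_message" is not consulted on this path.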

+ 44 - 9
celery/backends/cache.py

@@ -1,8 +1,11 @@
-from kombu.utils import cached_property
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import

-from celery.backends.base import KeyValueStoreBackend
-from celery.exceptions import ImproperlyConfigured
-from celery.datastructures import LocalCache
+from ..datastructures import LRUCache
+from ..exceptions import ImproperlyConfigured
+from ..utils import cached_property
+
+from .base import KeyValueStoreBackend

 _imp = [None]

@@ -20,7 +23,7 @@ def import_best_memcache():
                 raise ImproperlyConfigured(
                         "Memcached backend requires either the 'pylibmc' "
                         "or 'memcache' library")
-        _imp[0] = is_pylibmc, memcache
+        _imp[0] = (is_pylibmc, memcache)
     return _imp[0]


@@ -36,7 +39,7 @@ def get_best_memcache(*args, **kwargs):
 class DummyClient(object):

     def __init__(self, *args, **kwargs):
-        self.cache = LocalCache(5000)
+        self.cache = LRUCache(limit=5000)

     def get(self, key, *args, **kwargs):
         return self.cache.get(key)
@@ -51,6 +54,9 @@ class DummyClient(object):
     def delete(self, key, *args, **kwargs):
         self.cache.pop(key, None)

+    def incr(self, key, delta=1):
+        return self.cache.incr(key, delta)
+

 backends = {"memcache": lambda: get_best_memcache,
             "memcached": lambda: get_best_memcache,
@@ -59,6 +65,8 @@ backends = {"memcache": lambda: get_best_memcache,


 class CacheBackend(KeyValueStoreBackend):
+    servers = None
+    supports_native_join = True

     def __init__(self, expires=None, backend=None, options={}, **kwargs):
         super(CacheBackend, self).__init__(self, **kwargs)
@@ -66,10 +74,11 @@ class CacheBackend(KeyValueStoreBackend):
         self.options = dict(self.app.conf.CELERY_CACHE_BACKEND_OPTIONS,
                             **options)

-        backend = backend or self.app.conf.CELERY_CACHE_BACKEND
-        self.backend, _, servers = backend.partition("://")
+        self.backend = backend or self.app.conf.CELERY_CACHE_BACKEND
+        if self.backend:
+            self.backend, _, servers = self.backend.partition("://")
+            self.servers = servers.rstrip('/').split(";")
         self.expires = self.prepare_expires(expires, type=int)
-        self.servers = servers.rstrip('/').split(";")
         try:
             self.Client = backends[self.backend]()
         except KeyError:
@@ -90,6 +99,32 @@ class CacheBackend(KeyValueStoreBackend):
     def delete(self, key):
         return self.client.delete(key)

+    def on_chord_apply(self, setid, body, result=None, **kwargs):
+        key = self.get_key_for_chord(setid)
+        self.client.set(key, '0', time=86400)
+
+    def on_chord_part_return(self, task, propagate=False):
+        from ..task.sets import subtask
+        from ..result import TaskSetResult
+        setid = task.request.taskset
+        if not setid:
+            return
+        key = self.get_key_for_chord(setid)
+        deps = TaskSetResult.restore(setid, backend=task.backend)
+        if self.client.incr(key) >= deps.total:
+            subtask(task.request.chord).delay(deps.join(propagate=propagate))
+            deps.delete()
+            self.client.delete(key)
+
     @cached_property
     def client(self):
         return self.Client(self.servers, **self.options)
+
+    def __reduce__(self, args=(), kwargs={}):
+        servers = ";".join(self.servers)
+        backend = "%s://%s/" % (self.backend, servers)
+        kwargs.update(
+            dict(backend=backend,
+                 expires=self.expires,
+                 options=self.options))
+        return super(CacheBackend, self).__reduce__(args, kwargs)
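The chord support added here counts completed parts with memcached's atomic `incr`: `on_chord_apply` seeds a counter at "0" with a 24 hour (86400 s) expiry, every finishing member increments it, and the chord body fires once the count reaches `deps.total`. A rough sketch of the counter protocol using the `DummyClient` above (the taskset id and total are made up):

    from celery.backends.cache import DummyClient

    client = DummyClient()
    key = "chord-unlock-my-taskset-id"     # hypothetical taskset id
    client.set(key, '0', time=86400)       # what on_chord_apply does
    total = 3                              # deps.total: size of the header
    for part in range(total):
        if client.incr(key) >= total:      # what on_chord_part_return does
            print("last part done: apply the chord callback")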

+ 21 - 10
celery/backends/cassandra.py

@@ -1,4 +1,7 @@
+# -*- coding: utf-8 -*-
 """celery.backends.cassandra"""
+from __future__ import absolute_import
+
 try:
     import pycassa
     from thrift import Thrift
@@ -11,11 +14,11 @@ import time

 from datetime import datetime

-from celery.backends.base import BaseDictBackend
-from celery.exceptions import ImproperlyConfigured
-from celery.utils.serialization import pickle
-from celery.utils.timeutils import maybe_timedelta, timedelta_seconds
-from celery import states
+from .. import states
+from ..exceptions import ImproperlyConfigured
+from ..utils.timeutils import maybe_timedelta, timedelta_seconds
+
+from .base import BaseDictBackend


 class CassandraBackend(BaseDictBackend):
@@ -100,7 +103,7 @@ class CassandraBackend(BaseDictBackend):
                     Thrift.TException), exc:
                 if time.time() > ts:
                     raise
-                self.logger.warn('Cassandra error: %r. Retrying...' % (exc, ))
+                self.logger.warn('Cassandra error: %r. Retrying...', exc)
                 time.sleep(self._retry_wait)

     def _get_column_family(self):
@@ -124,9 +127,9 @@ class CassandraBackend(BaseDictBackend):
             cf = self._get_column_family()
             date_done = datetime.utcnow()
             meta = {"status": status,
-                    "result": pickle.dumps(result),
+                    "result": self.encode(result),
                     "date_done": date_done.strftime('%Y-%m-%dT%H:%M:%SZ'),
-                    "traceback": pickle.dumps(traceback)}
+                    "traceback": self.encode(traceback)}
             cf.insert(task_id, meta,
                       ttl=timedelta_seconds(self.expires))

@@ -142,12 +145,20 @@ class CassandraBackend(BaseDictBackend):
                 meta = {
                     "task_id": task_id,
                     "status": obj["status"],
-                    "result": pickle.loads(str(obj["result"])),
+                    "result": self.decode(obj["result"]),
                     "date_done": obj["date_done"],
-                    "traceback": pickle.loads(str(obj["traceback"])),
+                    "traceback": self.decode(obj["traceback"]),
                 }
             except (KeyError, pycassa.NotFoundException):
                 meta = {"status": states.PENDING, "result": None}
             return meta

         return self._retry_on_error(_do_get)
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(servers=self.servers,
+                 keyspace=self.keyspace,
+                 column_family=self.column_family,
+                 cassandra_options=self.cassandra_options))
+        return super(CassandraBackend, self).__reduce__(args, kwargs)
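Replacing `pickle.dumps`/`pickle.loads(str(...))` with the inherited `self.encode`/`self.decode` routes stored metadata through the configured result serializer instead of hard-coding pickle. A sketch of what this buys, assuming a backend configured with CELERY_RESULT_SERIALIZER="json" (`backend` is a stand-in, not from the diff):

    # Illustration only; `backend` stands in for any configured backend.
    payload = backend.encode({"status": "SUCCESS", "result": 42})
    # json serializer -> a JSON string; pickle serializer -> a bytestring.
    meta = backend.decode(payload)
    assert meta["result"] == 42   # callers no longer care which codec ran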

+ 31 - 11
celery/backends/database.py

@@ -1,11 +1,15 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
 from datetime import datetime

-from celery import states
-from celery.backends.base import BaseDictBackend
-from celery.db.models import Task, TaskSet
-from celery.db.session import ResultSession
-from celery.exceptions import ImproperlyConfigured
-from celery.utils.timeutils import maybe_timedelta
+from .. import states
+from ..db.models import Task, TaskSet
+from ..db.session import ResultSession
+from ..exceptions import ImproperlyConfigured
+from ..utils.timeutils import maybe_timedelta
+
+from .base import BaseDictBackend


 def _sqlalchemy_installed():
@@ -21,21 +25,30 @@ _sqlalchemy_installed()

 class DatabaseBackend(BaseDictBackend):
     """The database result backend."""
+    # ResultSet.iterate should sleep this much between each poll,
+    # to not bombard the database with queries.
+    subpolling_interval = 0.5

     def __init__(self, dburi=None, expires=None,
             engine_options=None, **kwargs):
         super(DatabaseBackend, self).__init__(**kwargs)
+        conf = self.app.conf
         self.expires = maybe_timedelta(self.prepare_expires(expires))
-        self.dburi = dburi or self.app.conf.CELERY_RESULT_DBURI
+        self.dburi = dburi or conf.CELERY_RESULT_DBURI
         self.engine_options = dict(engine_options or {},
-                        **self.app.conf.CELERY_RESULT_ENGINE_OPTIONS or {})
+                        **conf.CELERY_RESULT_ENGINE_OPTIONS or {})
+        self.short_lived_sessions = kwargs.get("short_lived_sessions",
+                                    conf.CELERY_RESULT_DB_SHORT_LIVED_SESSIONS)
         if not self.dburi:
             raise ImproperlyConfigured(
                     "Missing connection string! Do you have "
                     "CELERY_RESULT_DBURI set to a real value?")

     def ResultSession(self):
-        return ResultSession(dburi=self.dburi, **self.engine_options)
+        return ResultSession(
+                    dburi=self.dburi,
+                    short_lived_sessions=self.short_lived_sessions,
+                    **self.engine_options)

     def _store_result(self, task_id, result, status, traceback=None):
         """Store return value and status of an executed task."""
@@ -116,9 +129,16 @@ class DatabaseBackend(BaseDictBackend):
         expires = self.expires
         try:
             session.query(Task).filter(
-                    Task.date_done < (datetime.now() - expires)).delete()
+                    Task.date_done < (datetime.utcnow() - expires)).delete()
             session.query(TaskSet).filter(
-                    TaskSet.date_done < (datetime.now() - expires)).delete()
+                    TaskSet.date_done < (datetime.utcnow() - expires)).delete()
             session.commit()
         finally:
             session.close()
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(dburi=self.dburi,
+                 expires=self.expires,
+                 engine_options=self.engine_options))
+        return super(DatabaseBackend, self).__reduce__(args, kwargs)
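The `__reduce__` overrides added to each backend exist so a configured backend instance survives pickling: rather than re-creating the class with no arguments (losing dburi, expires, and so on), the constructor kwargs are shipped along and replayed by `unpickle_backend`. A minimal sketch of the mechanism, assuming `unpickle_backend` simply applies the kwargs (all names here are hypothetical stand-ins):

    import pickle

    def unpickle_backend(cls, args, kwargs):       # assumed behaviour
        return cls(*args, **kwargs)

    class Backend(object):
        def __init__(self, dburi=None):
            self.dburi = dburi

        def __reduce__(self, args=(), kwargs={}):
            # copy instead of mutating the shared default dict
            kwargs = dict(kwargs, dburi=self.dburi)
            return (unpickle_backend, (self.__class__, args, kwargs))

    copy = pickle.loads(pickle.dumps(Backend("sqlite:///results.db")))
    assert copy.dburi == "sqlite:///results.db"    # settings survive

Note that the diff's own `kwargs.update(...)` mutates the `kwargs={}` default argument; the copy above sidesteps that classic pitfall.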

+ 33 - 13
celery/backends/mongodb.py

@@ -1,4 +1,7 @@
+# -*- coding: utf-8 -*-
 """MongoDB backend for celery."""
+from __future__ import absolute_import
+
 from datetime import datetime

 try:
@@ -6,11 +9,11 @@ try:
 except ImportError:
     pymongo = None  # noqa

-from celery import states
-from celery.backends.base import BaseDictBackend
-from celery.exceptions import ImproperlyConfigured
-from celery.utils.serialization import pickle
-from celery.utils.timeutils import maybe_timedelta
+from .. import states
+from ..exceptions import ImproperlyConfigured
+from ..utils.timeutils import maybe_timedelta
+
+from .base import BaseDictBackend


 class Bunch:
@@ -66,8 +69,20 @@ class MongoBackend(BaseDictBackend):
         """Connect to the MongoDB server."""
         if self._connection is None:
             from pymongo.connection import Connection
-            self._connection = Connection(self.mongodb_host,
-                                          self.mongodb_port)
+
+            # The first pymongo.Connection() argument (host) can be
+            # a list of ['host:port'] elements or a mongodb connection
+            # URI. If this is the case, don't use self.mongodb_port
+            # but let pymongo get the port(s) from the URI instead.
+            # This enables the use of replica sets and sharding.
+            # See pymongo.Connection() for more info.
+            args = [self.mongodb_host]
+            if isinstance(self.mongodb_host, basestring) \
+                    and not self.mongodb_host.startswith("mongodb://"):
+                args.append(self.mongodb_port)
+
+            self._connection = Connection(*args)
+
         return self._connection

     def _get_database(self):
@@ -98,9 +113,9 @@ class MongoBackend(BaseDictBackend):

         meta = {"_id": task_id,
                 "status": status,
-                "result": Binary(pickle.dumps(result)),
-                "date_done": datetime.now(),
-                "traceback": Binary(pickle.dumps(traceback))}
+                "result": Binary(self.encode(result)),
+                "date_done": datetime.utcnow(),
+                "traceback": Binary(self.encode(traceback))}

         db = self._get_database()
         taskmeta_collection = db[self.mongodb_taskmeta_collection]
@@ -120,9 +135,9 @@ class MongoBackend(BaseDictBackend):
         meta = {
             "task_id": obj["_id"],
             "status": obj["status"],
-            "result": pickle.loads(str(obj["result"])),
+            "result": self.decode(obj["result"]),
             "date_done": obj["date_done"],
-            "traceback": pickle.loads(str(obj["traceback"])),
+            "traceback": self.decode(obj["traceback"]),
         }

         return meta
@@ -133,6 +148,11 @@ class MongoBackend(BaseDictBackend):
         taskmeta_collection = db[self.mongodb_taskmeta_collection]
         taskmeta_collection.remove({
                 "date_done": {
-                    "$lt": datetime.now() - self.expires,
+                    "$lt": datetime.utcnow() - self.expires,
                  }
         })
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(expires=self.expires))
+        return super(MongoBackend, self).__reduce__(args, kwargs)
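With the connection change, `mongodb_host` may now be a full `mongodb://` URI (possibly naming several hosts for replica sets), in which case the explicit port must be left out so pymongo parses the port(s) from the URI. A condensed, runnable restatement of the branch (the hosts are hypothetical):

    for host in ("localhost", "mongodb://nodeA:27017,nodeB:27018/"):
        args = [host]
        if isinstance(host, basestring) \
                and not host.startswith("mongodb://"):
            args.append(27017)   # plain hostname: pass the port explicitly
        print(args)
    # ['localhost', 27017]
    # ['mongodb://nodeA:27017,nodeB:27018/']   (ports come from the URI)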

+ 15 - 3
celery/backends/pyredis.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 This is here for backwards compatibility only.

@@ -6,7 +7,7 @@ Please use :class:`celery.backends.redis.RedisBackend` instead.
 """
 from __future__ import absolute_import

-from celery.backends import redis
+from . import redis


 class RedisBackend(redis.RedisBackend):
@@ -17,6 +18,17 @@ class RedisBackend(redis.RedisBackend):
         self.redis_port = redis_port
         self.redis_db = redis_db
         self.redis_password = redis_password
-        super(RedisBackend, self).__init__(host=redis_host,
+        # Changed in order to avoid duplicated arguments
+        super(RedisBackend, self).__init__(**dict(kwargs, host=redis_host,
                                            port=redis_port, db=redis_db,
-                                           password=redis_password, **kwargs)
+                                           password=redis_password))
+
+    def __reduce__(self, args=(), kwargs={}):
+        # Not very useful, but without the following, the redis_* attributes
+        # would not be set.
+        kwargs.update(
+            dict(redis_host=self.redis_host,
+                 redis_port=self.redis_port,
+                 redis_db=self.redis_db,
+                 redis_password=self.redis_password))
+        return super(RedisBackend, self).__reduce__(args, kwargs)
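The `**dict(kwargs, ...)` construction above is what avoids `TypeError: got multiple values for keyword argument 'host'` when the caller already passed `host` (or port/db/password) in `kwargs`: merging into one dict keeps a single copy of each key, with the explicit values winning. A self-contained sketch (names are illustrative):

    def child(host=None, port=None, **rest):
        return host, port

    kwargs = {"host": "from-caller", "expires": 30}
    # child(host="localhost", **kwargs) would raise the duplicate-kwarg error
    print(child(**dict(kwargs, host="localhost", port=6379)))
    # ('localhost', 6379) -- the merged dict carries one 'host' key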

+ 42 - 27
celery/backends/redis.py

@@ -1,9 +1,10 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import

-from kombu.utils import cached_property
+from ..exceptions import ImproperlyConfigured
+from ..utils import cached_property

-from celery.backends.base import KeyValueStoreBackend
-from celery.exceptions import ImproperlyConfigured
+from .base import KeyValueStoreBackend

 try:
     import redis
@@ -31,8 +32,13 @@ class RedisBackend(KeyValueStoreBackend):
     #: default Redis password (:const:`None`)
     password = None

+    #: Maximum number of connections in the pool.
+    max_connections = None
+
+    supports_native_join = True
+
     def __init__(self, host=None, port=None, db=None, password=None,
-            expires=None, **kwargs):
+            expires=None, max_connections=None, **kwargs):
         super(RedisBackend, self).__init__(**kwargs)
         conf = self.app.conf
         if self.redis is None:
@@ -40,9 +46,9 @@ class RedisBackend(KeyValueStoreBackend):
                     "You need to install the redis library in order to use "
                   + "Redis result store backend.")

-        # For compatability with the old REDIS_* configuration keys.
+        # For compatibility with the old REDIS_* configuration keys.
         def _get(key):
-            for prefix in "REDIS_%s", "CELERY_REDIS_%s":
+            for prefix in "CELERY_REDIS_%s", "REDIS_%s":
                 try:
                     return conf[prefix % key]
                 except KeyError:
@@ -53,6 +59,9 @@ class RedisBackend(KeyValueStoreBackend):
         self.db = db or _get("DB") or self.db
         self.password = password or _get("PASSWORD") or self.password
         self.expires = self.prepare_expires(expires, type=int)
+        self.max_connections = (max_connections
+                                or _get("MAX_CONNECTIONS")
+                                or self.max_connections)

     def get(self, key):
         return self.client.get(key)
@@ -65,36 +74,42 @@ class RedisBackend(KeyValueStoreBackend):
         client.set(key, value)
         if self.expires is not None:
             client.expire(key, self.expires)
+        client.publish(key, value)

     def delete(self, key):
         self.client.delete(key)

-    def close(self):
-        """Closes the Redis connection."""
-        del(self.client)
-
-    def process_cleanup(self):
-        self.close()
+    def on_chord_apply(self, setid, body, result=None, **kwargs):
+        self.app.TaskSetResult(setid, result).save()

-    def on_chord_apply(self, *args, **kwargs):
-        pass
-
-    def on_chord_part_return(self, task, keyprefix="chord-unlock-%s"):
-        from celery.task.sets import subtask
-        from celery.result import TaskSetResult
+    def on_chord_part_return(self, task, propagate=False):
+        from ..task.sets import subtask
+        from ..result import TaskSetResult
         setid = task.request.taskset
-        key = keyprefix % setid
+        if not setid:
+            return
+        key = self.get_key_for_chord(setid)
         deps = TaskSetResult.restore(setid, backend=task.backend)
         if self.client.incr(key) >= deps.total:
-            subtask(task.request.chord).delay(deps.join())
+            subtask(task.request.chord).delay(deps.join(propagate=propagate))
             deps.delete()
-        self.client.expire(key, 86400)
+            self.client.delete(key)
+        else:
+            self.client.expire(key, 86400)

     @cached_property
     def client(self):
-        return self.redis.Redis(host=self.host, port=self.port,
-                                db=self.db, password=self.password)
-
-    @client.deleter  # noqa
-    def client(self, client):
-        client.connection.disconnect()
+        pool = self.redis.ConnectionPool(host=self.host, port=self.port,
+                                         db=self.db, password=self.password,
+                                         max_connections=self.max_connections)
+        return self.redis.Redis(connection_pool=pool)
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(host=self.host,
+                 port=self.port,
+                 db=self.db,
+                 password=self.password,
+                 expires=self.expires,
+                 max_connections=self.max_connections))
+        return super(RedisBackend, self).__reduce__(args, kwargs)
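A subtle fix above: `_get` now tries the `CELERY_REDIS_*` keys before the deprecated bare `REDIS_*` ones, so the new-style setting wins when both are present. Self-contained illustration, with a plain dict standing in for the app configuration:

    conf = {"REDIS_HOST": "legacy-host",
            "CELERY_REDIS_HOST": "current-host"}   # hypothetical values

    def _get(key):
        for prefix in "CELERY_REDIS_%s", "REDIS_%s":
            try:
                return conf[prefix % key]
            except KeyError:
                pass

    print(_get("HOST"))  # 'current-host': CELERY_REDIS_* now shadows REDIS_*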

+ 12 - 2
celery/backends/tyrant.py

@@ -1,11 +1,15 @@
+# -*- coding: utf-8 -*-
 """celery.backends.tyrant"""
+from __future__ import absolute_import
+
 try:
     import pytyrant
 except ImportError:
     pytyrant = None  # noqa

-from celery.backends.base import KeyValueStoreBackend
-from celery.exceptions import ImproperlyConfigured
+from ..exceptions import ImproperlyConfigured
+
+from .base import KeyValueStoreBackend


 class TyrantBackend(KeyValueStoreBackend):
@@ -82,3 +86,9 @@ class TyrantBackend(KeyValueStoreBackend):

     def delete(self, key):
         self.open().pop(key, None)
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(tyrant_host=self.tyrant_host,
+                 tyrant_port=self.tyrant_port))
+        return super(TyrantBackend, self).__reduce__(args, kwargs)

+ 53 - 32
celery/beat.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.beat
+    ~~~~~~~~~~~
+
+    The Celery periodic task scheduler.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import

 import errno
@@ -14,17 +25,15 @@ except ImportError:

 from datetime import datetime

-from kombu.utils import cached_property
-
-from celery import __version__
-from celery import platforms
-from celery import registry
-from celery import signals
-from celery.app import app_or_default
-from celery.log import SilenceRepeated
-from celery.schedules import maybe_schedule, crontab
-from celery.utils import instantiate, maybe_promise
-from celery.utils.timeutils import humanize_seconds
+from . import __version__
+from . import platforms
+from . import registry
+from . import signals
+from .app import app_or_default
+from .log import SilenceRepeated
+from .schedules import maybe_schedule, crontab
+from .utils import cached_property, instantiate, maybe_promise
+from .utils.timeutils import humanize_seconds


 class SchedulingError(Exception):
@@ -79,14 +88,15 @@ class ScheduleEntry(object):
         self.total_run_count = total_run_count or 0

     def _default_now(self):
-        return datetime.now()
+        return datetime.utcnow()

-    def next(self, last_run_at=None):
+    def _next_instance(self, last_run_at=None):
         """Returns a new instance of the same class, but with
         its date and count fields updated."""
         return self.__class__(**dict(self,
-                                     last_run_at=last_run_at or datetime.now(),
-                                     total_run_count=self.total_run_count + 1))
+                                last_run_at=last_run_at or datetime.utcnow(),
+                                total_run_count=self.total_run_count + 1))
+    __next__ = next = _next_instance  # for 2to3

     def update(self, other):
         """Update values from another entry.
@@ -161,15 +171,16 @@ class Scheduler(object):
         is_due, next_time_to_run = entry.is_due()

         if is_due:
-            self.logger.debug("Scheduler: Sending due task %s" % entry.task)
+            self.logger.debug("Scheduler: Sending due task %s", entry.task)
             try:
                 result = self.apply_async(entry, publisher=publisher)
             except Exception, exc:
-                self.logger.error("Message Error: %s\n%s" % (exc,
-                    traceback.format_stack()), exc_info=sys.exc_info())
+                self.logger.error("Message Error: %s\n%s", exc,
+                                  traceback.format_stack(),
+                                  exc_info=sys.exc_info())
             else:
-                self.logger.debug("%s sent. id->%s" % (entry.task,
-                                                       result.task_id))
+                self.logger.debug("%s sent. id->%s", entry.task,
+                                                     result.task_id)
         return next_time_to_run

     def tick(self):
@@ -276,13 +287,23 @@ class Scheduler(object):
     def set_schedule(self, schedule):
         self.data = schedule

+    def _ensure_connected(self):
+        # callback called for each retry while the connection
+        # can't be established.
+        def _error_handler(exc, interval):
+            self.logger.error("Celerybeat: Connection error: %s. "
+                              "Trying again in %s seconds...", exc, interval)
+
+        return self.connection.ensure_connection(_error_handler,
+                    self.app.conf.BROKER_CONNECTION_MAX_RETRIES)
+
     @cached_property
     def connection(self):
         return self.app.broker_connection()

     @cached_property
     def publisher(self):
-        return self.Publisher(connection=self.connection)
+        return self.Publisher(connection=self._ensure_connected())

     @property
     def schedule(self):
@@ -316,8 +337,8 @@ class PersistentScheduler(Scheduler):
                                                 writeback=True)
             entries = self._store.setdefault("entries", {})
         except Exception, exc:
-            self.logger.error("Removing corrupted schedule file %r: %r" % (
-                self.schedule_filename, exc))
+            self.logger.error("Removing corrupted schedule file %r: %r",
+                              self.schedule_filename, exc, exc_info=True)
             self._remove_db()
             self._store = self.persistence.open(self.schedule_filename,
                                                 writeback=True)
@@ -362,15 +383,15 @@ class Service(object):
         self.schedule_filename = schedule_filename or \
                                     app.conf.CELERYBEAT_SCHEDULE_FILENAME

-        self._shutdown = threading.Event()
-        self._stopped = threading.Event()
+        self._is_shutdown = threading.Event()
+        self._is_stopped = threading.Event()
         self.debug = SilenceRepeated(self.logger.debug,
                         10 if self.max_interval < 60 else 1)

     def start(self, embedded_process=False):
         self.logger.info("Celerybeat: Starting...")
-        self.logger.debug("Celerybeat: Ticking with max interval->%s" % (
-                    humanize_seconds(self.scheduler.max_interval)))
+        self.logger.debug("Celerybeat: Ticking with max interval->%s",
+                          humanize_seconds(self.scheduler.max_interval))

         signals.beat_init.send(sender=self)
         if embedded_process:
@@ -378,24 +399,24 @@ class Service(object):
             platforms.set_process_title("celerybeat")

         try:
-            while not self._shutdown.isSet():
+            while not self._is_shutdown.isSet():
                 interval = self.scheduler.tick()
                 self.debug("Celerybeat: Waking up %s." % (
                         humanize_seconds(interval, prefix="in ")))
                 time.sleep(interval)
         except (KeyboardInterrupt, SystemExit):
-            self._shutdown.set()
+            self._is_shutdown.set()
         finally:
             self.sync()

     def sync(self):
         self.scheduler.close()
-        self._stopped.set()
+        self._is_stopped.set()

     def stop(self, wait=False):
         self.logger.info("Celerybeat: Shutting down...")
-        self._shutdown.set()
-        wait and self._stopped.wait()  # block until shutdown done.
+        self._is_shutdown.set()
+        wait and self._is_stopped.wait()  # block until shutdown done.

     def get_scheduler(self, lazy=False):
         filename = self.schedule_filename
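`_ensure_connected` leans on kombu's `ensure_connection`, which keeps retrying the broker connection and invokes the error callback between attempts, so the publisher is only built once a connection actually exists. A minimal sketch of the same callback pattern (the broker URL and retry count are assumptions, not from the diff):

    from kombu import BrokerConnection

    conn = BrokerConnection("amqp://guest:guest@localhost:5672//")

    def errback(exc, interval):
        print("Connection error: %s. Trying again in %s seconds..." % (
              exc, interval))

    conn.ensure_connection(errback, max_retries=5)   # blocks while retrying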

+ 60 - 13
celery/bin/base.py

@@ -1,9 +1,19 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
 import os
 import sys
+import warnings

 from optparse import OptionParser, make_option as Option

-import celery
+from .. import __version__, Celery
+from ..exceptions import CDeprecationWarning, CPendingDeprecationWarning
+
+
+# always enable DeprecationWarnings, so our users can see them.
+for warning in (CDeprecationWarning, CPendingDeprecationWarning):
+    warnings.simplefilter("once", warning, 0)


 class Command(object):
@@ -13,11 +23,12 @@ class Command(object):
     :keyword get_app: Callable returning the current app if no app provided.

     """
+    _default_broker_url = r'amqp://guest:guest@localhost:5672//'
     #: Arg list used in help.
     args = ''

     #: Application version.
-    version = celery.__version__
+    version = __version__

     #: If false the parser will raise an exception if positional
     #: args are provided.
@@ -31,6 +42,10 @@ class Command(object):
             Option("--app",
                     default=None, action="store", dest="app",
                     help="Name of the app instance to use. "),
+            Option("-b", "--broker",
+                    default=None, action="store", dest="broker",
+                    help="Broker URL.  Default is %s" % (
+                            _default_broker_url, )),
             Option("--loader",
                    default=None, action="store", dest="loader",
                    help="Name of the loader class to use. "
@@ -91,10 +106,18 @@ class Command(object):

         """
         options, args = self.parse_options(prog_name, argv)
+        for o in vars(options):
+            v = getattr(options, o)
+            if isinstance(v, basestring):
+                setattr(options, o, os.path.expanduser(v))
+        argv = map(lambda a: isinstance(a, basestring)
+                   and os.path.expanduser(a) or a, argv)
         if not self.supports_args and args:
             sys.stderr.write(
-                "\nUnrecognized command line arguments: %r\n" % (
+                "\nUnrecognized command line arguments: %s\n" % (
                     ", ".join(args), ))
+            import traceback
+            traceback.print_stack(file=sys.stderr)
             sys.stderr.write("\nTry --help?\n")
             sys.exit(1)
         return self.run(*args, **vars(options))
@@ -117,15 +140,28 @@ class Command(object):
                           option_list=(self.preload_options +
                                        self.get_options()))

+    def prepare_preload_options(self, options):
+        """Optional handler to do additional processing of preload options.
+
+        Configuration must not have been initialized
+        until after this is called.
+
+        """
+        pass
+
     def setup_app_from_commandline(self, argv):
         preload_options = self.parse_preload_options(argv)
-        app = (preload_options.pop("app", None) or
+        self.prepare_preload_options(preload_options)
+        app = (preload_options.get("app") or
               os.environ.get("CELERY_APP") or
               self.app)
-        loader = (preload_options.pop("loader", None) or
+        loader = (preload_options.get("loader") or
                  os.environ.get("CELERY_LOADER") or
                  "default")
-        config_module = preload_options.pop("config_module", None)
+        broker = preload_options.get("broker", None)
+        if broker:
+            os.environ["CELERY_BROKER_URL"] = broker
+        config_module = preload_options.get("config_module")
         if config_module:
             os.environ["CELERY_CONFIG_MODULE"] = config_module
         if app:
@@ -137,7 +173,7 @@ class Command(object):
         return argv

     def get_cls_by_name(self, name):
-        from celery.utils import get_cls_by_name, import_from_cwd
+        from ..utils import get_cls_by_name, import_from_cwd
         return get_cls_by_name(name, imp=import_from_cwd)

     def process_cmdline_config(self, argv):
@@ -151,21 +187,32 @@ class Command(object):

     def parse_preload_options(self, args):
         acc = {}
-        preload_options = dict((opt._long_opts[0], opt.dest)
-                                for opt in self.preload_options)
-        for arg in args:
+        opts = {}
+        for opt in self.preload_options:
+            for t in (opt._long_opts, opt._short_opts):
+                opts.update(dict(zip(t, [opt.dest] * len(t))))
+        index = 0
+        length = len(args)
+        while index < length:
+            arg = args[index]
             if arg.startswith('--') and '=' in arg:
                 key, value = arg.split('=', 1)
-                dest = preload_options.get(key)
+                dest = opts.get(key)
                 if dest:
                     acc[dest] = value
+            elif arg.startswith('-'):
+                dest = opts.get(arg)
+                if dest:
+                    acc[dest] = args[index + 1]
+                    index += 1
+            index += 1
         return acc

     def _get_default_app(self, *args, **kwargs):
-        return celery.Celery(*args, **kwargs)
+        return Celery(*args, **kwargs)


-def daemon_options(default_pidfile, default_logfile=None):
+def daemon_options(default_pidfile=None, default_logfile=None):
     return (
         Option('-f', '--logfile', default=default_logfile,
                action="store", dest="logfile",

+ 5 - 2
celery/bin/camqadm.py

@@ -1,9 +1,11 @@
-#!/usr/bin/env python
+# -*- coding: utf-8 -*-
 """camqadm

 .. program:: camqadm

 """
+from __future__ import absolute_import
+
 import cmd
 import sys
 import shlex
@@ -14,9 +16,10 @@ from itertools import count
 from amqplib import client_0_8 as amqp

 from celery.app import app_or_default
-from celery.bin.base import Command
 from celery.utils import padlist

+from celery.bin.base import Command
+
 # Valid string -> bool coercions.
 BOOLS = {"1": True, "0": False,
          "on": True, "off": False,

+ 16 - 7
celery/bin/celerybeat.py

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# -*- coding: utf-8 -*-
 """celerybeat

 .. program:: celerybeat
@@ -22,23 +22,29 @@
     `ERROR`, `CRITICAL`, or `FATAL`.

 """
-from __future__ import with_statement, absolute_import
+from __future__ import with_statement
+from __future__ import absolute_import
+
+import os

 from functools import partial

 from celery.platforms import detached
+
 from celery.bin.base import Command, Option, daemon_options


 class BeatCommand(Command):
     supports_args = False
+    preload_options = (Command.preload_options
+                     + daemon_options(default_pidfile="celerybeat.pid"))

     def run(self, detach=False, logfile=None, pidfile=None, uid=None,
             gid=None, umask=None, working_directory=None, **kwargs):
+        workdir = working_directory
         kwargs.pop("app", None)
         beat = partial(self.app.Beat,
                        logfile=logfile, pidfile=pidfile, **kwargs)
-        workdir = working_directory

         if detach:
             with detached(logfile, pidfile, uid, gid, umask, workdir):
@@ -46,6 +52,11 @@ class BeatCommand(Command):
         else:
             return beat().run()

+    def prepare_preload_options(self, options):
+        workdir = options.get("working_directory")
+        if workdir:
+            os.chdir(workdir)
+
     def get_options(self):
         conf = self.app.conf

@@ -60,7 +71,7 @@ class BeatCommand(Command):
                     "'.db' will be appended to the filename. Default: %s" % (
                             conf.CELERYBEAT_SCHEDULE_FILENAME, )),
             Option('--max-interval',
-                default=3600.0, type="float", dest="max_interval",
+                default=None, type="float", dest="max_interval",
                 help="Max. seconds to sleep between schedule iterations."),
             Option('-S', '--scheduler',
                 default=None,
@@ -70,9 +81,7 @@ class BeatCommand(Command):
             Option('-l', '--loglevel',
                 default=conf.CELERYBEAT_LOG_LEVEL,
                 action="store", dest="loglevel",
-                help="Loglevel. One of DEBUG/INFO/WARNING/ERROR/CRITICAL."),
-        ) + daemon_options(default_pidfile="celerybeat.pid",
-                           default_logfile=conf.CELERYBEAT_LOG_FILE)
+                help="Loglevel. One of DEBUG/INFO/WARNING/ERROR/CRITICAL."))


 def main():
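Moving the daemon options into `preload_options` (instead of appending them in `get_options`) means `--working-directory`, `--pidfile` and friends are parsed before the app and its configuration are loaded, so `prepare_preload_options` can `chdir` early enough for relative paths to resolve against the requested directory. A sketch of the effect, with a made-up directory:

    # e.g.: celerybeat --working-directory=/srv/proj --detach
    import os

    options = {"working_directory": "/srv/proj"}   # parsed preload options
    workdir = options.get("working_directory")
    if workdir:
        os.chdir(workdir)   # happens before the config module is imported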

+ 50 - 6
celery/bin/celeryctl.py

@@ -1,3 +1,7 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+from __future__ import with_statement
+
 import sys

 from optparse import OptionParser, make_option as Option
@@ -8,9 +12,10 @@ from anyjson import deserialize

 from celery import __version__
 from celery.app import app_or_default, current_app
-from celery.bin.base import Command as CeleryCommand
 from celery.utils import term

+from celery.bin.base import Command as CeleryCommand
+

 commands = {}

@@ -19,8 +24,8 @@ class Error(Exception):
     pass


-def command(fun):
-    commands[fun.__name__] = fun
+def command(fun, name=None):
+    commands[name or fun.__name__] = fun
     return fun


@@ -104,6 +109,28 @@ class Command(object):
         return OK, pformat(n)


+class list_(Command):
+    args = "<bindings>"
+
+    def list_bindings(self, channel):
+        fmt = lambda q, e, r: self.out("%s %s %s" % (q.ljust(28),
+                                                     e.ljust(28), r))
+        fmt("Queue", "Exchange", "Routing Key")
+        fmt("-" * 16, "-" * 16, "-" * 16)
+        for binding in channel.list_bindings():
+            fmt(*binding)
+
+    def run(self, what, *_, **kw):
+        topics = {"bindings": self.list_bindings}
+        if what not in topics:
+            raise ValueError("%r not in %r" % (what, topics.keys()))
+        with self.app.broker_connection() as conn:
+            self.app.amqp.get_task_consumer(conn).declare()
+            with conn.channel() as channel:
+                return topics[what](channel)
+list_ = command(list_, "list")
+
+
 class apply(Command):
     args = "<task_name>"
     option_list = Command.option_list + (
@@ -195,7 +222,8 @@ class inspect(Command):
                "reserved": 1.0,
                "stats": 1.0,
                "revoked": 1.0,
-               "registered_tasks": 1.0,
+               "registered_tasks": 1.0,  # alias to registered
+               "registered": 1.0,
                "enable_events": 1.0,
                "disable_events": 1.0,
                "ping": 0.2,
@@ -207,6 +235,7 @@ class inspect(Command):
                     help="Timeout in seconds (float) waiting for reply"),
                     help="Timeout in seconds (float) waiting for reply"),
                 Option("--destination", "-d", dest="destination",
                 Option("--destination", "-d", dest="destination",
                     help="Comma separated list of destination node names."))
                     help="Comma separated list of destination node names."))
+    show_body = True
 
 
     def usage(self, command):
     def usage(self, command):
         return "%%prog %s [options] %s [%s]" % (
         return "%%prog %s [options] %s [%s]" % (
@@ -214,6 +243,7 @@ class inspect(Command):
 
 
     def run(self, *args, **kwargs):
     def run(self, *args, **kwargs):
         self.quiet = kwargs.get("quiet", False)
         self.quiet = kwargs.get("quiet", False)
+        self.show_body = kwargs.get("show_body", True)
         if not args:
         if not args:
             raise Error("Missing inspect command. See --help")
             raise Error("Missing inspect command. See --help")
         command = args[0]
         command = args[0]
@@ -249,7 +279,7 @@ class inspect(Command):
             return
             return
         dirstr = not self.quiet and c.bold(c.white(direction), " ") or ""
         dirstr = not self.quiet and c.bold(c.white(direction), " ") or ""
         self.out(c.reset(dirstr, title))
         self.out(c.reset(dirstr, title))
-        if body and not self.quiet:
+        if body and self.show_body:
             self.out(body)
             self.out(body)
 inspect = command(inspect)
 inspect = command(inspect)
 
 
@@ -265,7 +295,7 @@ class status(Command):
     def run(self, *args, **kwargs):
     def run(self, *args, **kwargs):
         replies = inspect(app=self.app,
         replies = inspect(app=self.app,
                           no_color=kwargs.get("no_color", False)) \
                           no_color=kwargs.get("no_color", False)) \
-                    .run("ping", **dict(kwargs, quiet=True))
+                    .run("ping", **dict(kwargs, quiet=True, show_body=False))
         if not replies:
         if not replies:
             raise Error("No nodes replied within time constraint")
             raise Error("No nodes replied within time constraint")
         nodecount = len(replies)
         nodecount = len(replies)
@@ -307,8 +337,22 @@ class celeryctl(CeleryCommand):
         except Error:
         except Error:
             return self.execute("help", argv)
             return self.execute("help", argv)
 
 
+    def remove_options_at_beginning(self, argv, index=0):
+        if argv:
+            while index <= len(argv):
+                value = argv[index]
+                if value.startswith("--"):
+                    pass
+                elif value.startswith("-"):
+                    index += 1
+                else:
+                    return argv[index:]
+                index += 1
+        return []
+
     def handle_argv(self, prog_name, argv):
     def handle_argv(self, prog_name, argv):
         self.prog_name = prog_name
         self.prog_name = prog_name
+        argv = self.remove_options_at_beginning(argv)
         try:
         try:
             command = argv[0]
             command = argv[0]
         except IndexError:
         except IndexError:
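
Note: the change to `command()` above lets a callable register under a name other
than its `__name__`, which is how the `list_` class (named with a trailing
underscore to avoid shadowing the `list` builtin) becomes the `list` subcommand.
A minimal standalone sketch of that registry pattern (illustrative names, not
celery's actual module):

    commands = {}

    def command(fun, name=None):
        # Register under the explicit name when given, otherwise
        # fall back to the callable's __name__.
        commands[name or fun.__name__] = fun
        return fun

    def status():
        return "OK"
    status = command(status)            # registered as "status"

    class list_(object):                # trailing _ avoids the builtin
        pass
    list_ = command(list_, "list")      # but invoked as "list"

    assert sorted(commands) == ["list", "status"]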

+ 6 - 3
celery/bin/celeryd.py

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# -*- coding: utf-8 -*-
 """celeryd

 .. program:: celeryd
@@ -71,11 +73,13 @@
     terminated and replaced by a new worker.

 """
+from __future__ import absolute_import
+
 import sys

 try:
     from multiprocessing import freeze_support
-except ImportError:
+except ImportError:  # pragma: no cover
     freeze_support = lambda: True  # noqa

 from celery.bin.base import Command, Option
@@ -106,7 +108,8 @@ class WorkerCommand(Command):
                 default=conf.CELERYD_POOL,
                 action="store", dest="pool", type="str",
                 help="Pool implementation: "
-                     "processes (default), eventlet or gevent."),
+                     "processes (default), eventlet, gevent, "
+                     "solo or threads."),
             Option('--purge', '--discard', default=False,
                 action="store_true", dest="discard",
                 help="Discard all waiting tasks before the server is"

+ 17 - 6
celery/bin/celeryd_detach.py

@@ -1,4 +1,6 @@
-from __future__ import absolute_import, with_statement
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+from __future__ import with_statement

 import os
 import sys
@@ -7,6 +9,7 @@ from optparse import OptionParser, BadOptionError

 from celery import __version__
 from celery.platforms import detached
+
 from celery.bin.base import daemon_options

 OPTION_LIST = daemon_options(default_pidfile="celeryd.pid")
@@ -19,11 +22,10 @@ def detach(path, argv, logfile=None, pidfile=None, uid=None,
             os.execv(path, [path] + argv)
         except Exception:
             import logging
-            from celery.log import setup_logger
+            from ..log import setup_logger
             logger = setup_logger(logfile=logfile, loglevel=logging.ERROR)
-            logger.critical("Can't exec %r" % (
-                    " ".join([path] + argv), ),
-                    exc_info=sys.exc_info())
+            logger.critical("Can't exec %r", " ".join([path] + argv),
+                            exc_info=sys.exc_info())


 class PartialOptionParser(OptionParser):
@@ -111,10 +113,19 @@ class detached_celeryd(object):
     def execute_from_commandline(self, argv=None):
         if argv is None:
             argv = sys.argv
+        config = []
+        seen_cargs = 0
+        for arg in argv:
+            if seen_cargs:
+                config.append(arg)
+            else:
+                if arg == "--":
+                    seen_cargs = 1
+                    config.append(arg)
         prog_name = os.path.basename(argv[0])
         options, values, leftovers = self.parse_options(prog_name, argv[1:])
         detach(path=self.execv_path,
-               argv=self.execv_argv + leftovers,
+               argv=self.execv_argv + leftovers + config,
                **vars(options))
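
Note: the loop added to `execute_from_commandline()` above keeps the `--`
separator and everything after it, so those arguments survive the re-exec even
though the option parser never sees them. A standalone sketch of that split
(hypothetical helper name):

    def collect_passthrough(argv):
        # Everything from the first "--" onwards is copied verbatim,
        # including the "--" itself, mirroring the loop above.
        config, seen_cargs = [], False
        for arg in argv:
            if seen_cargs:
                config.append(arg)
            elif arg == "--":
                seen_cargs = True
                config.append(arg)
        return config

    assert collect_passthrough(
        ["celeryd", "-l", "INFO", "--", "broker.host=example.com"]
    ) == ["--", "broker.host=example.com"]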
 
 
 
 

+ 45 - 24
celery/bin/celeryd_multi.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """

 Examples
@@ -45,7 +46,7 @@ Examples
     #   * Three of the workers processes the images and video queue
     #   * Two of the workers processes the data queue with loglevel DEBUG
     #   * the rest processes the default' queue.
-    $ celeryd-multi start 10 -l INFO -Q:1-3 images,video -Q:4,5 data
+    $ celeryd-multi start 10 -l INFO -Q:1-3 images,video -Q:4,5:data
         -Q default -L:4,5 DEBUG

     # You can show the commands necessary to start the workers with
@@ -85,9 +86,10 @@ Examples
     celeryd -n xuzzy.myhost -c 3

 """
+from __future__ import absolute_import
+
 import errno
 import os
-import shlex
 import signal
 import socket
 import sys
@@ -97,8 +99,9 @@ from subprocess import Popen
 from time import sleep

 from celery import __version__
-from celery.utils import term, import_from_cwd
-from celery.loaders.default import DEFAULT_CONFIG_MODULE
+from celery.platforms import shellsplit
+from celery.utils import term
+from celery.utils.encoding import from_utf8

 SIGNAMES = set(sig for sig in dir(signal)
                        if sig.startswith("SIG") and "_" not in sig)
@@ -119,6 +122,7 @@ usage: %(prog_name)s start <node1 node2 nodeN|range> [celeryd options]

 additional options (must appear after command name):

+    * --nosplash:   Don't display program info.
     * --quiet:      Don't show as much output.
     * --verbose:    Show more output.
     * --no-color:   Don't display colors.
@@ -132,10 +136,13 @@ def main():
 class MultiTool(object):
     retcode = 0  # Final exit code.

-    def __init__(self):
+    def __init__(self, env=None, fh=None):
+        self.fh = fh or sys.stderr
+        self.env = env
         self.commands = {"start": self.start,
                          "show": self.show,
                          "stop": self.stop,
+                         "stop_verify": self.stop_verify,
                          "restart": self.restart,
                          "kill": self.kill,
                          "names": self.names,
@@ -146,10 +153,13 @@ class MultiTool(object):
     def execute_from_commandline(self, argv, cmd="celeryd"):
         argv = list(argv)   # don't modify callers argv.

-        # Reserve the --quiet|-q/--verbose options.
+        # Reserve the --nosplash|--quiet|-q/--verbose options.
+        self.nosplash = False
         self.quiet = False
         self.verbose = False
         self.no_color = False
+        if "--nosplash" in argv:
+            self.nosplash = argv.pop(argv.index("--nosplash"))
         if "--quiet" in argv:
             self.quiet = argv.pop(argv.index("--quiet"))
         if "-q" in argv:
@@ -168,14 +178,6 @@ class MultiTool(object):
         if len(argv) == 0 or argv[0][0] == "-":
             return self.error()

-        if len(argv) == 1:
-            try:
-                conf = import_from_cwd(os.environ.get("CELERY_CONFIG_MODULE",
-                                                      DEFAULT_CONFIG_MODULE))
-                argv.extend(conf.CELERYD_MULTI_ARGS.split())
-            except (ImportError, AttributeError):
-                pass
-
         try:
             self.commands[argv[0]](argv[1:], cmd)
         except KeyError:
@@ -183,9 +185,12 @@ class MultiTool(object):

         return self.retcode

+    def say(self, msg):
+        self.fh.write("%s\n" % (msg, ))
+
     def names(self, argv, cmd):
         p = NamespacedOptionParser(argv)
-        print("\n".join(hostname
+        self.say("\n".join(hostname
                         for hostname, _, _ in multi_args(p, cmd)))

     def get(self, argv, cmd):
@@ -193,13 +198,13 @@ class MultiTool(object):
         p = NamespacedOptionParser(argv[1:])
         for name, worker, _ in multi_args(p, cmd):
             if name == wanted:
-                print(" ".join(worker))
+                self.say(" ".join(worker))
                 return

     def show(self, argv, cmd):
         p = NamespacedOptionParser(argv)
         self.note("> Starting nodes...")
-        print("\n".join(" ".join(worker)
+        self.say("\n".join(" ".join(worker)
                         for _, worker, _ in multi_args(p, cmd)))

     def start(self, argv, cmd):
@@ -339,11 +344,17 @@ class MultiTool(object):
         self._stop_nodes(p, cmd, retry=2, callback=on_node_shutdown)
         self.retval = int(any(retvals))

+    def stop_verify(self, argv, cmd):
+        self.splash()
+        p = NamespacedOptionParser(argv)
+        self.with_detacher_default_options(p)
+        return self._stop_nodes(p, cmd, retry=2)
+
     def expand(self, argv, cmd=None):
         template = argv[0]
         p = NamespacedOptionParser(argv[1:])
         for _, _, expander in multi_args(p, cmd):
-            print(expander(template))
+            self.say(expander(template))

     def help(self, argv, cmd=None):
         say(__doc__)
@@ -353,12 +364,14 @@ class MultiTool(object):
         say(USAGE % {"prog_name": self.prog_name})

     def splash(self):
-        c = self.colored
-        self.note(c.cyan("celeryd-multi v%s" % __version__))
+        if not self.nosplash:
+            c = self.colored
+            self.note(c.cyan("celeryd-multi v%s" % __version__))

     def waitexec(self, argv, path=sys.executable):
-        argstr = shlex.split(" ".join([path] + list(argv)))
-        pipe = Popen(argstr)
+        args = " ".join([path] + list(argv))
+        argstr = shellsplit(from_utf8(args))
+        pipe = Popen(argstr, env=self.env)
         self.info("  %s" % " ".join(argstr))
         retcode = pipe.wait()
         if retcode < 0:
@@ -387,6 +400,7 @@ class MultiTool(object):
 def multi_args(p, cmd="celeryd", append="", prefix="", suffix=""):
     names = p.values
     options = dict(p.options)
+    passthrough = p.passthrough
     ranges = len(names) == 1
     if ranges:
         try:
@@ -402,6 +416,8 @@ def multi_args(p, cmd="celeryd", append="", prefix="", suffix=""):
                    options.pop("-n", socket.gethostname()))
     prefix = options.pop("--prefix", prefix) or ""
     suffix = options.pop("--suffix", suffix) or "." + hostname
+    if suffix in ('""', "''"):
+        suffix = ""

     for ns_name, ns_opts in p.namespaces.items():
         if "," in ns_name or (ranges and "-" in ns_name):
@@ -415,7 +431,8 @@
                                 "%n": name})
         argv = ([expand(cmd)] +
                 [format_opt(opt, expand(value))
-                        for opt, value in p.optmerge(name, options).items()])
+                        for opt, value in p.optmerge(name, options).items()] +
+                [passthrough])
         if append:
             argv.append(expand(append))
         yield this_name, argv, expand
@@ -427,6 +444,7 @@ class NamespacedOptionParser(object):
         self.args = args
         self.options = {}
         self.values = []
+        self.passthrough = ""
         self.namespaces = defaultdict(lambda: {})

         self.parse()
@@ -436,7 +454,10 @@ class NamespacedOptionParser(object):
         pos = 0
         while pos < len(rargs):
             arg = rargs[pos]
-            if arg[0] == "-":
+            if arg == "--":
+                self.passthrough = " ".join(rargs[pos:])
+                break
+            elif arg[0] == "-":
                 if arg[1] == "-":
                     self.process_long_opt(arg[2:])
                 else:
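
Note: `NamespacedOptionParser.parse()` now stops at the first `--` and stores
the remainder as an opaque `passthrough` string, which `multi_args()` appends
to every generated celeryd command line. A rough sketch of the split
(hypothetical helper, not the actual parser):

    def split_passthrough(args):
        # Options before "--" are parsed normally; the tail is kept
        # verbatim and later appended to each node's argv.
        if "--" in args:
            i = args.index("--")
            return args[:i], " ".join(args[i:])
        return args, ""

    parsed, passthrough = split_passthrough(
        ["start", "3", "-l", "INFO", "--", "-Q", "images"])
    assert passthrough == "-- -Q images"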

+ 13 - 4
celery/bin/celeryev.py

@@ -1,16 +1,22 @@
-from __future__ import absolute_import, with_statement
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+from __future__ import with_statement

+import os
 import sys

 from functools import partial

 from celery import platforms
 from celery.platforms import detached
+
 from celery.bin.base import Command, Option, daemon_options


 class EvCommand(Command):
     supports_args = False
+    preload_options = (Command.preload_options
+                     + daemon_options(default_pidfile="celeryev.pid"))

     def run(self, dump=False, camera=None, frequency=1.0, maxrate=None,
             loglevel="INFO", logfile=None, prog_name="celeryev",
@@ -29,6 +35,11 @@ class EvCommand(Command):
                                   detach=detach)
         return self.run_evtop()

+    def prepare_preload_options(self, options):
+        workdir = options.get("working_directory")
+        if workdir:
+            os.chdir(workdir)
+
     def run_evdump(self):
         from celery.events.dumper import evdump
         self.set_process_status("dump")
@@ -80,9 +91,7 @@ class EvCommand(Command):
                    help="Recording: Shutter rate limit (e.g. 10/m)"),
             Option('-l', '--loglevel',
                    action="store", dest="loglevel", default="INFO",
-                   help="Loglevel. Default is WARNING."),
-        ) + daemon_options(default_pidfile="celeryev.pid",
-                           default_logfile=None)
+                   help="Loglevel. Default is WARNING."))


 def main():
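
Note: the daemon options move from `get_options()` into `preload_options`
above, so they are parsed before the app is fully set up, letting
`prepare_preload_options()` chdir into `--workdir` early. Since optparse option
lists are plain tuples, composing them is just concatenation; a minimal sketch
(assuming a `daemon_options()` that returns such a tuple, as
`celery.bin.base` does):

    from optparse import make_option as Option

    def daemon_options(default_pidfile=None):
        # Stand-in for celery.bin.base.daemon_options: a tuple of
        # Option objects shared by all detaching commands.
        return (
            Option("--pidfile", default=default_pidfile),
            Option("--logfile", default=None),
        )

    base_options = (Option("--app", default=None), )
    preload_options = base_options + daemon_options("celeryev.pid")
    assert len(preload_options) == 3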

+ 6 - 3
celery/concurrency/__init__.py

@@ -1,9 +1,12 @@
-from celery.utils import get_cls_by_name
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from ..utils import get_cls_by_name

 ALIASES = {
     "processes": "celery.concurrency.processes.TaskPool",
-    "eventlet": "celery.concurrency.evlet.TaskPool",
-    "gevent": "celery.concurrency.evg.TaskPool",
+    "eventlet": "celery.concurrency.eventlet.TaskPool",
+    "gevent": "celery.concurrency.gevent.TaskPool",
     "threads": "celery.concurrency.threads.TaskPool",
     "solo": "celery.concurrency.solo.TaskPool",
 }
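
Note: this hunk updates aliases to match the renamed modules `evlet.py` ->
`eventlet.py` and `evg.py` -> `gevent.py` (the renames themselves appear
further down). The table maps short `-P` pool names to dotted class paths; a
hedged sketch of the lookup, assuming `get_cls_by_name` accepts an alias
mapping as in this era of the codebase:

    ALIASES = {
        "processes": "celery.concurrency.processes.TaskPool",
        "eventlet": "celery.concurrency.eventlet.TaskPool",
    }

    def get_implementation(cls):
        # Resolve either a short alias ("processes") or a full
        # dotted path ("mypkg.MyPool") to the pool class.
        from celery.utils import get_cls_by_name
        return get_cls_by_name(cls, ALIASES)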

+ 23 - 10
celery/concurrency/base.py

@@ -1,3 +1,7 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import logging
 import os
 import sys
 import time
@@ -5,9 +9,10 @@ import traceback

 from functools import partial

-from celery import log
-from celery.datastructures import ExceptionInfo
-from celery.utils import timer2
+from .. import log
+from ..datastructures import ExceptionInfo
+from ..utils import timer2
+from ..utils.encoding import safe_repr


 def apply_target(target, args=(), kwargs={}, callback=None,
@@ -25,6 +30,7 @@ class BasePool(object):
     Timer = timer2.Timer

     signal_safe = True
+    rlimit_safe = True
     is_green = False

     _state = None
@@ -35,6 +41,7 @@ class BasePool(object):
         self.putlocks = putlocks
         self.logger = logger or log.get_default_logger()
         self.options = options
+        self.does_debug = self.logger.isEnabledFor(logging.DEBUG)

     def on_start(self):
         pass
@@ -74,7 +81,7 @@ class BasePool(object):
             soft_timeout=None, timeout=None, **compat):
         """Equivalent of the :func:`apply` built-in function.

-        Callbacks should optimally return as soon as possible ince
+        Callbacks should optimally return as soon as possible since
        otherwise the thread which handles the result will get blocked.

        """
@@ -84,8 +91,9 @@ class BasePool(object):
        on_ready = partial(self.on_ready, callback, errback)
        on_worker_error = partial(self.on_worker_error, errback)

-        self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)" % (
-            target, args, kwargs))
+        if self.does_debug:
+            self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)",
+                            target, safe_repr(args), safe_repr(kwargs))

        return self.on_apply(target, args, kwargs,
                             callback=on_ready,
@@ -108,16 +116,17 @@ class BasePool(object):
        else:
            self.safe_apply_callback(callback, ret_value)

-    def on_worker_error(self, errback, exc):
-        errback(ExceptionInfo((exc.__class__, exc, None)))
+    def on_worker_error(self, errback, exc_info):
+        errback(exc_info)

    def safe_apply_callback(self, fun, *args):
        if fun:
            try:
                fun(*args)
            except BaseException:
-                self.logger.error("Pool callback raised exception: %s" % (
-                    traceback.format_exc(), ), exc_info=sys.exc_info())
+                self.logger.error("Pool callback raised exception: %s",
+                                  traceback.format_exc(),
+                                  exc_info=sys.exc_info())

    def _get_info(self):
        return {}
@@ -129,3 +138,7 @@ class BasePool(object):
    @property
    def active(self):
        return self._state == self.RUN
+
+    @property
+    def num_processes(self):
+        return self.limit
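
Note: two logging changes above are worth spelling out: the DEBUG check is
hoisted into `self.does_debug` so `apply_async()` skips the call entirely when
debug is off (at the cost of ignoring logger level changes made later), and
format arguments are now passed to the logger instead of being pre-interpolated
with `%`, so `safe_repr()` of the arguments only runs if a record is actually
emitted. A standalone sketch of the pattern:

    import logging

    logger = logging.getLogger("pool.example")
    does_debug = logger.isEnabledFor(logging.DEBUG)  # checked once

    def apply(target, args, kwargs):
        if does_debug:
            # Lazy %-style arguments: formatting is deferred to the
            # handler, keeping the hot path cheap.
            logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)",
                         target, args, kwargs)
        return target(*args, **kwargs)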

+ 17 - 7
celery/concurrency/evlet.py → celery/concurrency/eventlet.py

@@ -1,3 +1,6 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
 import os
 import sys

@@ -9,9 +12,10 @@ if not os.environ.get("EVENTLET_NOPATCH"):
     eventlet.monkey_patch()
     eventlet.debug.hub_prevent_multiple_readers(False)

-from celery import signals
-from celery.concurrency import base
-from celery.utils import timer2
+from .. import signals
+from ..utils import timer2
+
+from . import base


 def apply_target(target, args=(), kwargs={}, callback=None,
@@ -23,12 +27,12 @@ def apply_target(target, args=(), kwargs={}, callback=None,
 class Schedule(timer2.Schedule):

     def __init__(self, *args, **kwargs):
-        from eventlet.greenthread import spawn_after_local
+        from eventlet.greenthread import spawn_after
         from greenlet import GreenletExit
         super(Schedule, self).__init__(*args, **kwargs)

         self.GreenletExit = GreenletExit
-        self._spawn_after_local = spawn_after_local
+        self._spawn_after = spawn_after
         self._queue = set()

     def enter(self, entry, eta=None, priority=0):
@@ -43,7 +47,7 @@ class Schedule(timer2.Schedule):
             eta = now
         secs = max(eta - now, 0)

-        g = self._spawn_after_local(secs, entry)
+        g = self._spawn_after(secs, entry)
         self._queue.add(g)
         g.link(self._entry_exit, entry)
         g.entry = entry
@@ -68,7 +72,7 @@ class Schedule(timer2.Schedule):
         while queue:
             try:
                 queue.pop().cancel()
-            except KeyError:
+            except (KeyError, self.GreenletExit):
                 pass

     @property
@@ -85,6 +89,12 @@ class Timer(timer2.Timer):
     def stop(self):
         self.schedule.clear()

+    def cancel(self, tref):
+        try:
+            tref.cancel()
+        except self.schedule.GreenletExit:
+            pass
+
     def start(self):
         pass
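
Note: `spawn_after_local` ties a timer to the lifetime of the greenlet that
created it, so scheduled entries silently died with their caller; plain
`spawn_after` keeps them alive until explicitly cancelled. As the diff itself
shows, cancelling can raise `GreenletExit` when the timer already fired, hence
the new `Timer.cancel()` guard. The defensive pattern in isolation (assumes
eventlet/greenlet installed):

    from greenlet import GreenletExit

    def safe_cancel(tref):
        # tref is a greenthread returned by eventlet's spawn_after();
        # cancel() may raise GreenletExit if it is already running.
        try:
            tref.cancel()
        except GreenletExit:
            pass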
 
 

+ 7 - 2
celery/concurrency/evg.py → celery/concurrency/gevent.py

@@ -1,3 +1,6 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
 import os
 import sys

@@ -7,8 +10,9 @@ if not os.environ.get("GEVENT_NOPATCH"):
     from gevent import monkey
     monkey.patch_all()

-from celery.concurrency.base import apply_target, BasePool
-from celery.utils import timer2
+from ..utils import timer2
+
+from .base import apply_target, BasePool


 class Schedule(timer2.Schedule):
@@ -84,6 +88,7 @@ class TaskPool(BasePool):
     Timer = Timer

     signal_safe = False
+    rlimit_safe = False
     is_green = True

     def __init__(self, *args, **kwargs):

+ 12 - 11
celery/concurrency/processes/__init__.py

@@ -1,22 +1,19 @@
-"""
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import

-Process Pools.
-
-"""
 import platform
 import signal as _signal

-from os import kill as _kill
-
-from celery.concurrency.base import BasePool
-from celery.concurrency.processes.pool import Pool, RUN
+from ..base import BasePool
+from .pool import Pool, RUN

-if platform.system() == "Windows":
+if platform.system() == "Windows":  # pragma: no cover
     # On Windows os.kill calls TerminateProcess which cannot be
     # handled by # any process, so this is needed to terminate the task
     # *and its children* (if any).
-    from celery.concurrency.processes import _win
-    _kill = _win.kill_processtree  # noqa
+    from ._win import kill_processtree as _kill  # noqa
+else:
+    from os import kill as _kill                 # noqa


 class TaskPool(BasePool):
@@ -77,3 +74,7 @@ class TaskPool(BasePool):
                 "max-tasks-per-child": self._pool._maxtasksperchild,
                 "put-guarded-by-semaphore": self.putlocks,
                 "timeouts": (self._pool.soft_timeout, self._pool.timeout)}
+
+    @property
+    def num_processes(self):
+        return self._pool._processes
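
Note: the import shuffle makes the Windows/POSIX split symmetric: both branches
now bind a single `_kill` name at import time, so call sites need no platform
checks. The shape of the pattern, generically (the Windows body here is a
stub standing in for the ctypes-based `kill_processtree` helper):

    import platform

    if platform.system() == "Windows":
        def _kill(pid, signum):
            # Sketch only: TerminateProcess cannot be trapped, so the
            # real helper kills the whole process tree via ctypes.
            raise NotImplementedError("sketch only")
    else:
        from os import kill as _kill  # noqa

    # Callers simply use _kill(pid, signum) on either platform.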

+ 3 - 2
celery/concurrency/processes/_win.py

@@ -1,6 +1,7 @@
-import os
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import

-__all__ = ["get_processtree_pids", "kill_processtree"]
+import os

 # psutil is painfully slow in win32. So to avoid adding big
 # dependencies like pywin32 a ctypes based solution is preferred

+ 112 - 46
celery/concurrency/processes/pool.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 #
 # Module providing the `Pool` class for managing a process pool
 #
@@ -5,8 +6,7 @@
 #
 # Copyright (c) 2007-2008, R Oudkerk --- see COPYING.txt
 #
-
-__all__ = ['Pool']
+from __future__ import absolute_import

 #
 # Imports
@@ -22,11 +22,13 @@ import collections
 import time
 import signal
 import warnings
+import logging

 from multiprocessing import Process, cpu_count, TimeoutError, Event
 from multiprocessing import util
 from multiprocessing.util import Finalize, debug

+from celery.datastructures import ExceptionInfo
 from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
 from celery.exceptions import WorkerLostError

@@ -74,16 +76,30 @@ class LaxBoundedSemaphore(threading._Semaphore):
         _Semaphore.__init__(self, value, verbose)
         self._initial_value = value

-    def release(self):
-        if self._Semaphore__value < self._initial_value:
-            _Semaphore.release(self)
-        if __debug__:
-            self._note("%s.release: success, value=%s (unchanged)" % (
-                self, self._Semaphore__value))
+    if sys.version_info >= (3, 0):
+
+        def release(self):
+            if self._value < self._initial_value:
+                _Semaphore.release(self)
+            if __debug__:
+                self._note("%s.release: success, value=%s (unchanged)" % (
+                    self, self._value))
+
+        def clear(self):
+            while self._value < self._initial_value:
+                _Semaphore.release(self)
+    else:
+
+        def release(self):  # noqa
+            if self._Semaphore__value < self._initial_value:
+                _Semaphore.release(self)
+            if __debug__:
+                self._note("%s.release: success, value=%s (unchanged)" % (
+                    self, self._Semaphore__value))

-    def clear(self):
-        while self._Semaphore__value < self._initial_value:
-            _Semaphore.release(self)
+        def clear(self):  # noqa
+            while self._Semaphore__value < self._initial_value:
+                _Semaphore.release(self)

 #
 # Exceptions
@@ -118,8 +134,19 @@ def soft_timeout_sighandler(signum, frame):
 #


-def worker(inqueue, outqueue, initializer=None, initargs=(), 
+def worker(inqueue, outqueue, initializer=None, initargs=(),
            maxtasks=None, sentinel=None):
+    # Re-init logging system.
+    # Workaround for http://bugs.python.org/issue6721#msg140215
+    # Python logging module uses RLock() objects which are broken after
+    # fork. This can result in a deadlock (Issue #496).
+    logger_names = logging.Logger.manager.loggerDict.keys()
+    logger_names.append(None)  # for root logger
+    for name in logger_names:
+        for handler in logging.getLogger(name).handlers:
+            handler.createLock()
+    logging._lock = threading.RLock()
+
     pid = os.getpid()
     assert maxtasks is None or (type(maxtasks) == int and maxtasks > 0)
     put = outqueue.put
@@ -171,13 +198,15 @@ def worker(inqueue, outqueue, initializer=None, initargs=(),
         put((ACK, (job, i, time.time(), pid)))
         try:
             result = (True, func(*args, **kwds))
-        except Exception, e:
-            result = (False, e)
+        except Exception:
+            result = (False, ExceptionInfo(sys.exc_info()))
         try:
             put((READY, (job, i, result)))
         except Exception, exc:
+            _, _, tb = sys.exc_info()
             wrapped = MaybeEncodingError(exc, result[1])
-            put((READY, (job, i, (False, wrapped))))
+            einfo = ExceptionInfo((MaybeEncodingError, wrapped, tb))
+            put((READY, (job, i, (False, einfo))))

         completed += 1
     debug('worker exiting after %d tasks' % completed)
@@ -325,7 +354,12 @@ class TimeoutHandler(PoolThread):
                 return
             debug('hard time limit exceeded for %i', i)
             # Remove from cache and set return value to an exception
-            job._set(i, (False, TimeLimitExceeded(hard_timeout)))
+            exc_info = None
+            try:
+                raise TimeLimitExceeded(hard_timeout)
+            except TimeLimitExceeded:
+                exc_info = sys.exc_info()
+            job._set(i, (False, ExceptionInfo(exc_info)))

             # Remove from _pool
             process, _index = _process_by_pid(job._worker_pid)
@@ -413,7 +447,7 @@ class ResultHandler(PoolThread):
         debug('result handler starting')
         while 1:
             try:
-                ready, task = poll(0.2)
+                ready, task = poll(1.0)
             except (IOError, EOFError), exc:
                 debug('result handler got %r -- exiting' % (exc, ))
                 return
@@ -433,7 +467,7 @@ class ResultHandler(PoolThread):
         time_terminate = None
         while cache and self._state != TERMINATE:
             try:
-                ready, task = poll(0.2)
+                ready, task = poll(1.0)
             except (IOError, EOFError), exc:
                 debug('result handler got %r -- exiting' % (exc, ))
                 return
@@ -567,7 +601,7 @@ class Pool(object):
         self._poolctrl[w.pid] = sentinel
         return w

-    def _join_exited_workers(self, shutdown=False, lost_worker_timeout=10.0):
+    def _join_exited_workers(self, shutdown=False):
         """Cleanup after any worker processes which have exited due to
         reaching their specified lifetime. Returns True if any workers were
         cleaned up.
@@ -575,13 +609,18 @@ class Pool(object):
         now = None
         # The worker may have published a result before being terminated,
         # but we have no way to accurately tell if it did.  So we wait for
-        # 10 seconds before we mark the job with WorkerLostError.
+        # _lost_worker_timeout seconds before we mark the job with
+        # WorkerLostError.
         for job in [job for job in self._cache.values()
                 if not job.ready() and job._worker_lost]:
             now = now or time.time()
-            if now - job._worker_lost > lost_worker_timeout:
-                err = WorkerLostError("Worker exited prematurely.")
-                job._set(None, (False, err))
+            if now - job._worker_lost > job._lost_worker_timeout:
+                exc_info = None
+                try:
+                    raise WorkerLostError("Worker exited prematurely.")
+                except WorkerLostError:
+                    exc_info = ExceptionInfo(sys.exc_info())
+                job._set(None, (False, exc_info))

         if shutdown and not len(self._pool):
             raise WorkersJoined()
@@ -601,10 +640,11 @@ class Pool(object):
             for job in self._cache.values():
                 for worker_pid in job.worker_pids():
                     if worker_pid in cleaned and not job.ready():
-                        if self._putlock is not None:
-                            self._putlock.release()
                         job._worker_lost = time.time()
                         continue
+            if self._putlock is not None:
+                for worker in cleaned:
+                    self._putlock.release()
             return True
         return False

@@ -701,39 +741,44 @@ class Pool(object):
         assert self._state == RUN
         return self.map_async(func, iterable, chunksize).get()

-    def imap(self, func, iterable, chunksize=1):
+    def imap(self, func, iterable, chunksize=1, lost_worker_timeout=10.0):
         '''
         Equivalent of `itertools.imap()` -- can be MUCH slower
         than `Pool.map()`
         '''
         assert self._state == RUN
         if chunksize == 1:
-            result = IMapIterator(self._cache)
+            result = IMapIterator(self._cache,
+                                  lost_worker_timeout=lost_worker_timeout)
             self._taskqueue.put((((result._job, i, func, (x,), {})
                          for i, x in enumerate(iterable)), result._set_length))
             return result
         else:
             assert chunksize > 1
             task_batches = Pool._get_tasks(func, iterable, chunksize)
-            result = IMapIterator(self._cache)
+            result = IMapIterator(self._cache,
+                                  lost_worker_timeout=lost_worker_timeout)
             self._taskqueue.put((((result._job, i, mapstar, (x,), {})
                      for i, x in enumerate(task_batches)), result._set_length))
             return (item for chunk in result for item in chunk)

-    def imap_unordered(self, func, iterable, chunksize=1):
+    def imap_unordered(self, func, iterable, chunksize=1,
+                       lost_worker_timeout=10.0):
         '''
         Like `imap()` method but ordering of results is arbitrary
         '''
         assert self._state == RUN
         if chunksize == 1:
-            result = IMapUnorderedIterator(self._cache)
+            result = IMapUnorderedIterator(self._cache,
+                    lost_worker_timeout=lost_worker_timeout)
             self._taskqueue.put((((result._job, i, func, (x,), {})
                          for i, x in enumerate(iterable)), result._set_length))
             return result
         else:
             assert chunksize > 1
             task_batches = Pool._get_tasks(func, iterable, chunksize)
-            result = IMapUnorderedIterator(self._cache)
+            result = IMapUnorderedIterator(self._cache,
+                    lost_worker_timeout=lost_worker_timeout)
             self._taskqueue.put((((result._job, i, mapstar, (x,), {})
                      for i, x in enumerate(task_batches)), result._set_length))
             return (item for chunk in result for item in chunk)
@@ -762,19 +807,21 @@ class Pool(object):
             warnings.warn(UserWarning("Soft timeouts are not supported: "
                     "on this platform: It does not have the SIGUSR1 signal."))
             soft_timeout = None
-        result = ApplyResult(self._cache, callback,
-                             accept_callback, timeout_callback,
-                             error_callback, soft_timeout, timeout)
-
         if waitforslot and self._putlock is not None:
-            self._putlock.acquire()
-            if self._state != RUN:
-                return
-        if timeout or soft_timeout:
-            # start the timeout handler thread when required.
-            self._start_timeout_handler()
-        self._taskqueue.put(([(result._job, None, func, args, kwds)], None))
-        return result
+            while 1:
+                if self._state != RUN or self._putlock.acquire(False):
+                    break
+                time.sleep(1.0)
+        if self._state == RUN:
+            result = ApplyResult(self._cache, callback,
+                                 accept_callback, timeout_callback,
+                                 error_callback, soft_timeout, timeout)
+            if timeout or soft_timeout:
+                # start the timeout handler thread when required.
+                self._start_timeout_handler()
+            self._taskqueue.put(([(result._job, None,
+                                   func, args, kwds)], None))
+            return result

     def map_async(self, func, iterable, chunksize=None, callback=None):
         '''
@@ -912,7 +959,7 @@ class ApplyResult(object):

     def __init__(self, cache, callback, accept_callback=None,
             timeout_callback=None, error_callback=None, soft_timeout=None,
-            timeout=None):
+            timeout=None, lost_worker_timeout=10.0):
         self._mutex = threading.Lock()
         self._cond = threading.Condition(threading.Lock())
         self._job = job_counter.next()
@@ -924,6 +971,7 @@ class ApplyResult(object):
         self._timeout_callback = timeout_callback
         self._timeout = timeout
         self._soft_timeout = soft_timeout
+        self._lost_worker_timeout = lost_worker_timeout

         self._accepted = False
         self._worker_pid = None
@@ -1067,15 +1115,19 @@ class MapResult(ApplyResult):


 class IMapIterator(object):
+    _worker_lost = None

-    def __init__(self, cache):
+    def __init__(self, cache, lost_worker_timeout=10.0):
         self._cond = threading.Condition(threading.Lock())
         self._job = job_counter.next()
         self._cache = cache
         self._items = collections.deque()
         self._index = 0
         self._length = None
+        self._ready = False
         self._unsorted = {}
+        self._worker_pids = []
+        self._lost_worker_timeout = lost_worker_timeout
         cache[self._job] = self

     def __iter__(self):
@@ -1088,12 +1140,14 @@ class IMapIterator(object):
                 item = self._items.popleft()
             except IndexError:
                 if self._index == self._length:
+                    self._ready = True
                     raise StopIteration
                 self._cond.wait(timeout)
                 try:
                     item = self._items.popleft()
                 except IndexError:
                     if self._index == self._length:
+                        self._ready = True
                         raise StopIteration
                     raise TimeoutError
         finally:
@@ -1102,7 +1156,7 @@ class IMapIterator(object):
         success, value = item
         if success:
             return value
-        raise value
+        raise Exception(value)

     __next__ = next                    # XXX

@@ -1121,6 +1175,7 @@ class IMapIterator(object):
                 self._unsorted[i] = obj

             if self._index == self._length:
+                self._ready = True
                 del self._cache[self._job]
         finally:
             self._cond.release()
@@ -1130,11 +1185,21 @@ class IMapIterator(object):
         try:
             self._length = length
             if self._index == self._length:
+                self._ready = True
                 self._cond.notify()
                 del self._cache[self._job]
         finally:
             self._cond.release()

+    def _ack(self, i, time_accepted, pid):
+        self._worker_pids.append(pid)
+
+    def ready(self):
+        return self._ready
+
+    def worker_pids(self):
+        return self._worker_pids
+
 #
 # Class whose instances are returned by `Pool.imap_unordered()`
 #
@@ -1149,6 +1214,7 @@ class IMapUnorderedIterator(IMapIterator):
             self._index += 1
             self._cond.notify()
             if self._index == self._length:
+                self._ready = True
                 del self._cache[self._job]
         finally:
             self._cond.release()
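
Note: the most consequential hunk above is the logging re-initialisation at the
top of `worker()`. `fork()` copies the `logging` module's global lock and every
handler lock in whatever state another thread held them, so a child forked
mid-log-call can deadlock on its very first log record (CPython issue 6721,
celery issue #496). Recreating the locks immediately after the fork is the
standard workaround; as a standalone function (Python 2 era, matching the diff,
and touching the private `logging._lock` just as the diff does):

    import logging
    import threading

    def reset_logging_locks_after_fork():
        # Replace every lock that may have been copied in a held
        # state from the parent process.
        names = list(logging.Logger.manager.loggerDict.keys())
        names.append(None)  # the root logger
        for name in names:
            for handler in logging.getLogger(name).handlers:
                handler.createLock()
        logging._lock = threading.RLock()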

+ 7 - 8
celery/concurrency/solo.py

@@ -1,18 +1,17 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
 import os

-from celery.concurrency.base import BasePool, apply_target
+from .base import BasePool, apply_target


 class TaskPool(BasePool):
     """Solo task pool (blocking, inline)."""

-    def on_start(self):
-        self.pid = os.getpid()
-
-    def on_apply(self, target, args, kwargs, callback=None,
-            accept_callback=None, **_):
-        return apply_target(target, args, kwargs,
-                            callback, accept_callback, self.pid)
+    def __init__(self, *args, **kwargs):
+        super(TaskPool, self).__init__(*args, **kwargs)
+        self.on_apply = apply_target

     def _get_info(self):
         return {"max-concurrency": 1,
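
Note: the solo pool no longer wraps `apply_target()`; it simply aliases
`on_apply` to it in `__init__`, so every "async" apply executes inline in the
calling process. A rough, simplified sketch of what `apply_target()` (from
`celery/concurrency/base.py`) does -- hedged, not the exact implementation:

    import os
    import time

    def apply_target(target, args=(), kwargs={}, callback=None,
            accept_callback=None, pid=None):
        # Notify acceptance, run the target synchronously, then
        # deliver the return value -- all in the calling process.
        if accept_callback:
            accept_callback(pid or os.getpid(), time.time())
        ret = target(*args, **kwargs)
        if callback:
            callback(ret)
        return ret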

+ 15 - 1
celery/concurrency/threads.py

@@ -1,4 +1,15 @@
-from celery.concurrency.base import apply_target, BasePool
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from UserDict import UserDict
+
+from .base import apply_target, BasePool
+
+
+class NullDict(UserDict):
+
+    def __setitem__(self, key, value):
+        pass


 class TaskPool(BasePool):
@@ -15,6 +26,9 @@ class TaskPool(BasePool):

     def on_start(self):
         self._pool = self.ThreadPool(self.limit)
+        # threadpool stores all work requests until they are processed
+        # we don't need this dict, and it occupies way too much memory.
+        self._pool.workRequests = NullDict()

     def on_stop(self):
         self._pool.dismissWorkers(self.limit, do_join=True)
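
Note: the `threadpool` library keeps every submitted work request in its
`workRequests` dict until explicitly collected; since celery never reads it
back, swapping in a dict that drops all writes caps what would otherwise grow
without bound. The trick in isolation (Python 2's `UserDict`, as in the diff):

    from UserDict import UserDict  # collections.UserDict on Python 3

    class NullDict(UserDict):
        """A dict that silently discards every assignment."""

        def __setitem__(self, key, value):
            pass

    d = NullDict()
    d["request-1"] = object()
    assert len(d) == 0  # nothing was retained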

+ 5 - 2
celery/conf.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """

 **DEPRECATED**
@@ -6,8 +7,10 @@ Use :mod:`celery.defaults` instead.


 """
-from celery import current_app
-from celery.app import defaults
+from __future__ import absolute_import
+
+from . import current_app
+from .app import defaults

 _DEFAULTS = defaults.DEFAULTS
 conf = current_app.conf

+ 4 - 1
celery/contrib/abortable.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 =========================
 Abortable tasks overview
@@ -29,7 +30,7 @@ In the consumer:

    from celery.contrib.abortable import AbortableTask

-   def MyLongRunningTask(AbortableTask):
+   class MyLongRunningTask(AbortableTask):

        def run(self, **kwargs):
            logger = self.get_logger(**kwargs)
@@ -78,6 +79,8 @@ have it block until the task is finished.
    database backends.

 """
+from __future__ import absolute_import
+
 from celery.task.base import Task
 from celery.result import AsyncResult
 
 

+ 31 - 7
celery/contrib/batches.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 """
 celery.contrib.batches
 celery.contrib.batches
 ======================
 ======================
@@ -39,24 +40,47 @@ Registering the click is done as follows:
 :license: BSD, see LICENSE for more details.
 :license: BSD, see LICENSE for more details.
 
 
 """
 """
-from itertools import count
-from Queue import Queue
+from __future__ import absolute_import
 
 
-from kombu.utils import cached_property
+from itertools import count
+from Queue import Empty, Queue
 
 
-from celery.datastructures import consume_queue
 from celery.task import Task
 from celery.task import Task
-from celery.utils import timer2
+from celery.utils import cached_property, timer2
 from celery.worker import state
 from celery.worker import state
 
 
 
 
+def consume_queue(queue):
+    """Iterator yielding all immediately available items in a
+    :class:`Queue.Queue`.
+
+    The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
+
+    *Examples*
+
+        >>> q = Queue()
+        >>> map(q.put, range(4))
+        >>> list(consume_queue(q))
+        [0, 1, 2, 3]
+        >>> list(consume_queue(q))
+        []
+
+    """
+    get = queue.get_nowait
+    while 1:
+        try:
+            yield get()
+        except Empty:
+            break
+
+
 def apply_batches_task(task, args, loglevel, logfile):
 def apply_batches_task(task, args, loglevel, logfile):
     task.request.update({"loglevel": loglevel, "logfile": logfile})
     task.request.update({"loglevel": loglevel, "logfile": logfile})
     try:
     try:
         result = task(*args)
         result = task(*args)
     except Exception, exp:
     except Exception, exp:
         result = None
         result = None
-        task.logger.error("There was an Exception: %s" % exp)
+        task.logger.error("There was an Exception: %s", exp, exc_info=True)
     finally:
     finally:
         task.request.clear()
         task.request.clear()
     return result
     return result
@@ -167,7 +191,7 @@ class Batches(Task):
                     callback=acks_late[True] and on_return or None)
                     callback=acks_late[True] and on_return or None)
 
 
     def debug(self, msg):
     def debug(self, msg):
-        self.logger.debug("%s: %s" % (self.name, msg))
+        self.logger.debug("%s: %s", self.name, msg)
 
 
     @cached_property
     @cached_property
     def logger(self):
     def logger(self):

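Editor's note: two hunks above switch from eagerly `%`-formatting the log message to passing the arguments to the logging call. The stdlib `logging` module then interpolates only if the record is actually emitted, and `exc_info=True` attaches the active traceback. An illustrative snippet (names are arbitrary):

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("batches-demo")

    # Eager: the string is built even though DEBUG is filtered out.
    log.debug("flushing: %s" % list(range(100)))

    # Lazy: interpolation is deferred until a handler emits the record.
    log.debug("flushing: %s", list(range(100)))

    try:
        1 / 0
    except ZeroDivisionError as exc:
        # exc_info=True appends the traceback to the logged message.
        log.error("There was an Exception: %s", exc, exc_info=True)
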
+ 2 - 0
celery/contrib/rdb.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 """
 celery.contrib.rdb
 celery.contrib.rdb
 ==================
 ==================
@@ -36,6 +37,7 @@ Inspired by http://snippets.dzone.com/posts/show/7248
 :license: BSD, see LICENSE for more details.
 :license: BSD, see LICENSE for more details.
 
 
 """
 """
+from __future__ import absolute_import
 
 
 import errno
 import errno
 import os
 import os

+ 121 - 100
celery/datastructures.py

@@ -1,22 +1,27 @@
+# -*- coding: utf-8 -*-
 """
 """
-celery.datastructures
-=====================
+    celery.datastructures
+    ~~~~~~~~~~~~~~~~~~~~~
 
 
-Custom data structures.
+    Custom types and data structures.
 
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
 
 
 """
 """
 from __future__ import absolute_import
 from __future__ import absolute_import
+from __future__ import with_statement
 
 
+import sys
 import time
 import time
 import traceback
 import traceback
 
 
 from itertools import chain
 from itertools import chain
-from Queue import Empty
+from threading import RLock
 
 
-from celery.utils.compat import OrderedDict
+from kombu.utils.limits import TokenBucket  # noqa
+
+from .utils.compat import UserDict, OrderedDict
 
 
 
 
 class AttributeDictMixin(object):
 class AttributeDictMixin(object):
@@ -79,8 +84,16 @@ class DictAttribute(object):
     def __contains__(self, key):
         return hasattr(self.obj, key)

-    def iteritems(self):
+    def _iterate_items(self):
         return vars(self.obj).iteritems()
+    iteritems = _iterate_items
+
+    if sys.version_info >= (3, 0):
+        items = _iterate_items
+    else:
+
+        def items(self):
+            return list(self._iterate_items())


 class ConfigurationView(AttributeDictMixin):
         # changes takes precedence.
         # changes take precedence.
         return chain(*[op(d) for d in reversed(self._order)])

-    def iterkeys(self):
+    def _iterate_keys(self):
         return self._iter(lambda d: d.iterkeys())
+    iterkeys = _iterate_keys

-    def iteritems(self):
+    def _iterate_items(self):
         return self._iter(lambda d: d.iteritems())
+    iteritems = _iterate_items

-    def itervalues(self):
+    def _iterate_values(self):
         return self._iter(lambda d: d.itervalues())
+    itervalues = _iterate_values

     def keys(self):
-        return list(self.iterkeys())
+        return list(self._iterate_keys())

     def items(self):
-        return list(self.iteritems())
+        return list(self._iterate_items())

     def values(self):
-        return list(self.itervalues())
+        return list(self._iterate_values())
+
+
+class _Code(object):
+
+    def __init__(self, code):
+        self.co_filename = code.co_filename
+        self.co_name = code.co_name
+
+
+class _Frame(object):
+    Code = _Code
+
+    def __init__(self, frame):
+        self.f_globals = {
+            "__file__": frame.f_globals.get("__file__", "__main__"),
+        }
+        self.f_code = self.Code(frame.f_code)
+
+
+class Traceback(object):
+    Frame = _Frame
+
+    def __init__(self, tb):
+        self.tb_frame = self.Frame(tb.tb_frame)
+        self.tb_lineno = tb.tb_lineno
+        if tb.tb_next is None:
+            self.tb_next = None
+        else:
+            self.tb_next = Traceback(tb.tb_next)


 class ExceptionInfo(object):
@@ -172,15 +217,21 @@ class ExceptionInfo(object):

     """

-    #: The original exception.
+    #: Exception type.
+    type = None
+
+    #: Exception instance.
     exception = None

-    #: A traceback form the point when :attr:`exception` was raised.
+    #: Pickleable traceback instance for use with :mod:`traceback`
+    tb = None
+
+    #: String representation of the traceback.
     traceback = None

     def __init__(self, exc_info):
-        _, exception, _ = exc_info
-        self.exception = exception
+        self.type, self.exception, tb = exc_info
+        self.tb = Traceback(tb)
         self.traceback = ''.join(traceback.format_exception(*exc_info))

     def __str__(self):
@@ -189,29 +240,9 @@ class ExceptionInfo(object):
     def __repr__(self):
         return "<ExceptionInfo: %r>" % (self.exception, )

-
-def consume_queue(queue):
-    """Iterator yielding all immediately available items in a
-    :class:`Queue.Queue`.
-
-    The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
-
-    *Examples*
-
-        >>> q = Queue()
-        >>> map(q.put, range(4))
-        >>> list(consume_queue(q))
-        [0, 1, 2, 3]
-        >>> list(consume_queue(q))
-        []
-
-    """
-    get = queue.get_nowait
-    while 1:
-        try:
-            yield get()
-        except Empty:
-            break
+    @property
+    def exc_info(self):
+        return self.type, self.exception, self.tb


 class LimitedSet(object):
@@ -289,76 +320,66 @@ class LimitedSet(object):
         return self.chronologically[0]


-class LocalCache(OrderedDict):
-    """Dictionary with a finite number of keys.
+class LRUCache(UserDict):
+    """LRU Cache implementation using a doubly linked list to track access.

-    Older items expires first.
+    :keyword limit: The maximum number of keys to keep in the cache.
+        When a new key is inserted and the limit has been exceeded,
+        the *Least Recently Used* key will be discarded from the
+        cache.

     """

     def __init__(self, limit=None):
-        super(LocalCache, self).__init__()
         self.limit = limit
+        self.mutex = RLock()
+        self.data = OrderedDict()

-    def __setitem__(self, key, value):
-        while len(self) >= self.limit:
-            self.popitem(last=False)
-        super(LocalCache, self).__setitem__(key, value)
-
-
-class TokenBucket(object):
-    """Token Bucket Algorithm.
-
-    See http://en.wikipedia.org/wiki/Token_Bucket
-    Most of this code was stolen from an entry in the ASPN Python Cookbook:
-    http://code.activestate.com/recipes/511490/
-
-    .. admonition:: Thread safety
-
-        This implementation may not be thread safe.
-
-    """
-
-    #: The rate in tokens/second that the bucket will be refilled
-    fill_rate = None
-
-    #: Maximum number of tokens in the bucket.
-    capacity = 1
-
-    #: Timestamp of the last time a token was taken out of the bucket.
-    timestamp = None
+    def __getitem__(self, key):
+        with self.mutex:
+            value = self[key] = self.data.pop(key)
+            return value

-    def __init__(self, fill_rate, capacity=1):
-        self.capacity = float(capacity)
-        self._tokens = capacity
-        self.fill_rate = float(fill_rate)
-        self.timestamp = time.time()
+    def keys(self):
+        # userdict.keys in py3k calls __getitem__
+        return self.data.keys()

-    def can_consume(self, tokens=1):
-        """Returns :const:`True` if `tokens` number of tokens can be consumed
-        from the bucket."""
-        if tokens <= self._get_tokens():
-            self._tokens -= tokens
-            return True
-        return False
+    def values(self):
+        return list(self._iterate_values())

-    def expected_time(self, tokens=1):
-        """Returns the expected time in seconds when a new token should be
-        available.
+    def items(self):
+        return list(self._iterate_items())

-        .. admonition:: Warning
+    def __setitem__(self, key, value):
+        # remove least recently used key.
+        with self.mutex:
+            if self.limit and len(self.data) >= self.limit:
+                self.data.pop(iter(self.data).next())
+            self.data[key] = value

-            This consumes a token from the bucket.
+    def __iter__(self):
+        return self.data.iterkeys()

-        """
-        _tokens = self._get_tokens()
-        tokens = max(tokens, _tokens)
-        return (tokens - _tokens) / self.fill_rate
+    def _iterate_items(self):
+        for k in self.data:
+            try:
+                yield (k, self.data[k])
+            except KeyError:
+                pass
+    iteritems = _iterate_items

-    def _get_tokens(self):
-        if self._tokens < self.capacity:
-            now = time.time()
-            delta = self.fill_rate * (now - self.timestamp)
-            self._tokens = min(self.capacity, self._tokens + delta)
-            self.timestamp = now
-        return self._tokens
+    def _iterate_values(self):
+        for k in self.data:
+            try:
+                yield self.data[k]
+            except KeyError:
+                pass
+    itervalues = _iterate_values
+
+    def incr(self, key, delta=1):
+        with self.mutex:
+            # this acts as memcached does: store as a string, but return
+            # an integer as long as it exists and we can cast it
+            newval = int(self.data.pop(key)) + delta
+            self[key] = str(newval)
+            return newval

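Editor's note: the new `LRUCache` gets its eviction order from `OrderedDict` insertion order: `__getitem__` pops and re-inserts the key, moving it to the most-recently-used end, while `__setitem__` evicts from the front. A condensed, standalone sketch of the same behavior (not the committed class):

    from collections import OrderedDict

    class LRUCache(object):
        def __init__(self, limit=None):
            self.limit = limit
            self.data = OrderedDict()

        def __getitem__(self, key):
            # pop + re-insert moves the key to the MRU end.
            value = self.data[key] = self.data.pop(key)
            return value

        def __setitem__(self, key, value):
            if self.limit and len(self.data) >= self.limit:
                self.data.popitem(last=False)   # discard the LRU entry
            self.data[key] = value

    cache = LRUCache(limit=2)
    cache["a"] = 1
    cache["b"] = 2
    cache["a"]           # touch "a"; "b" is now least recently used
    cache["c"] = 3       # evicts "b"
    assert list(cache.data) == ["a", "c"]
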
+ 3 - 0
celery/db/a805d4bd.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 """
 a805d4bd
 a805d4bd
 This module fixes a bug with pickling and relative imports in Python < 2.6.
 This module fixes a bug with pickling and relative imports in Python < 2.6.
@@ -19,6 +20,8 @@ Hence the random module name "a805d5bd" is taken to decrease the chances of
 a collision.
 a collision.
 
 
 """
 """
+from __future__ import absolute_import
+
 from sqlalchemy.types import PickleType as _PickleType
 from sqlalchemy.types import PickleType as _PickleType
 
 
 
 

+ 3 - 0
celery/db/dfd042c7.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 """
 dfd042c7
 dfd042c7
 
 
@@ -5,6 +6,8 @@ SQLAlchemy 0.5.8 version of a805d4bd, see the docstring of that module
 for an explanation of this workaround.
 for an explanation of this workaround.
 
 
 """
 """
+from __future__ import absolute_import
+
 from sqlalchemy.types import PickleType as _PickleType
 from sqlalchemy.types import PickleType as _PickleType
 from sqlalchemy import util
 from sqlalchemy import util
 
 

+ 12 - 7
celery/db/models.py

@@ -1,14 +1,19 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
 from datetime import datetime

 import sqlalchemy as sa

-from celery import states
-from celery.db.session import ResultModelBase
+from .. import states
+
+from .session import ResultModelBase
+
 # See docstring of a805d4bd for an explanation for this workaround ;)
 if sa.__version__.startswith('0.5'):
-    from celery.db.dfd042c7 import PickleType
+    from .dfd042c7 import PickleType
 else:
-    from celery.db.a805d4bd import PickleType  # noqa
+    from .a805d4bd import PickleType  # noqa


 class Task(ResultModelBase):
@@ -22,8 +27,8 @@ class Task(ResultModelBase):
     task_id = sa.Column(sa.String(255), unique=True)
     status = sa.Column(sa.String(50), default=states.PENDING)
     result = sa.Column(PickleType, nullable=True)
-    date_done = sa.Column(sa.DateTime, default=datetime.now,
-                       onupdate=datetime.now, nullable=True)
+    date_done = sa.Column(sa.DateTime, default=datetime.utcnow,
+                       onupdate=datetime.utcnow, nullable=True)
     traceback = sa.Column(sa.Text, nullable=True)

     def __init__(self, task_id):
@@ -49,7 +54,7 @@ class TaskSet(ResultModelBase):
                 autoincrement=True, primary_key=True)
     taskset_id = sa.Column(sa.String(255), unique=True)
     result = sa.Column(sa.PickleType, nullable=True)
-    date_done = sa.Column(sa.DateTime, default=datetime.now,
+    date_done = sa.Column(sa.DateTime, default=datetime.utcnow,
                        nullable=True)

     def __init__(self, taskset_id, result):

+ 8 - 2
celery/db/session.py

@@ -1,3 +1,6 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
 from collections import defaultdict

 from sqlalchemy import create_engine
@@ -8,6 +11,7 @@ ResultModelBase = declarative_base()

 _SETUP = defaultdict(lambda: False)
 _ENGINES = {}
+_SESSIONS = {}


 def get_engine(dburi, **kwargs):
@@ -16,9 +20,11 @@ def get_engine(dburi, **kwargs):
     return _ENGINES[dburi]


-def create_session(dburi, **kwargs):
+def create_session(dburi, short_lived_sessions=False, **kwargs):
     engine = get_engine(dburi, **kwargs)
-    return engine, sessionmaker(bind=engine)
+    if short_lived_sessions or dburi not in _SESSIONS:
+        _SESSIONS[dburi] = sessionmaker(bind=engine)
+    return engine, _SESSIONS[dburi]


 def setup_results(engine):

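Editor's note: `create_session()` now memoizes one `sessionmaker` per database URI instead of building a new factory on every call, while `short_lived_sessions=True` forces a fresh factory each time. The caching pattern in isolation (stand-in factory, no SQLAlchemy required):

    _SESSIONS = {}

    def make_factory(dburi):
        # stand-in for sessionmaker(bind=get_engine(dburi))
        return lambda: ("session", dburi)

    def create_session(dburi, short_lived_sessions=False):
        if short_lived_sessions or dburi not in _SESSIONS:
            _SESSIONS[dburi] = make_factory(dburi)
        return _SESSIONS[dburi]

    s1 = create_session("sqlite://")
    s2 = create_session("sqlite://")
    assert s1 is s2       # cached factory is reused
    s3 = create_session("sqlite://", short_lived_sessions=True)
    assert s3 is not s2   # a fresh factory replaces the cached one
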
+ 5 - 2
celery/decorators.py

@@ -12,12 +12,15 @@ The new decorators does not support magic keyword arguments.
 :license: BSD, see LICENSE for more details.

 """
+from __future__ import absolute_import
+
 import warnings

-from celery import task as _task
+from . import task as _task
+from .exceptions import CDeprecationWarning


-warnings.warn(PendingDeprecationWarning("""
+warnings.warn(CDeprecationWarning("""
 The `celery.decorators` module and the magic keyword arguments
 are pending deprecation and will be deprecated in 2.4, then removed
 in 3.0.

+ 19 - 5
celery/events/__init__.py

@@ -1,4 +1,18 @@
-from __future__ import absolute_import, with_statement
+# -*- coding: utf-8 -*-
+"""
+    celery.events
+    ~~~~~~~~~~~~~
+
+    Events are messages sent for actions happening
+    in the worker (and clients if :setting:`CELERY_SEND_TASK_SENT_EVENT`
+    is enabled), used for monitoring purposes.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement

 import time
 import socket
@@ -11,8 +25,8 @@ from itertools import count
 from kombu.entity import Exchange, Queue
 from kombu.messaging import Consumer, Producer

-from celery.app import app_or_default
-from celery.utils import gen_unique_id
+from ..app import app_or_default
+from ..utils import uuid

 event_exchange = Exchange("celeryev", type="topic")

@@ -144,7 +158,7 @@ class EventReceiver(object):
         if handlers is not None:
             self.handlers = handlers
         self.routing_key = routing_key
-        self.node_id = node_id or gen_unique_id()
+        self.node_id = node_id or uuid()
         self.queue = Queue("%s.%s" % ("celeryev", self.node_id),
                            exchange=event_exchange,
                            routing_key=self.routing_key,
@@ -239,7 +253,7 @@ class Events(object):
                                app=self.app)

     def State(self):
-        from celery.events.state import State as _State
+        from .state import State as _State
         return _State()

     @contextmanager

+ 19 - 6
celery/events/cursesmon.py

@@ -1,4 +1,16 @@
-import celery
+# -*- coding: utf-8 -*-
+"""
+    celery.events.cursesmon
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Graphical monitor of Celery events using curses.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
 import curses
 import sys
 import threading
@@ -9,9 +21,10 @@ from itertools import count
 from textwrap import wrap
 from math import ceil

-from celery import states
-from celery.app import app_or_default
-from celery.utils import abbr, abbrtask
+from .. import __version__
+from .. import states
+from ..app import app_or_default
+from ..utils import abbr, abbrtask

 BORDER_SPACING = 4
 LEFT_BORDER_OFFSET = 3
@@ -35,7 +48,7 @@ class CursesMonitor(object):
     online_str = "Workers online: "
     help_title = "Keys: "
     help = ("j:up k:down i:info t:traceback r:result c:revoke ^c: quit")
-    greet = "celeryev %s" % celery.__version__
+    greet = "celeryev %s" % __version__
     info_str = "Info: "

     def __init__(self, state, keymap=None, app=None):
@@ -298,7 +311,7 @@ class CursesMonitor(object):
         attr = curses.A_NORMAL
         if task.uuid == self.selected_task:
             attr = curses.A_STANDOUT
-        timestamp = datetime.fromtimestamp(
+        timestamp = datetime.utcfromtimestamp(
                         task.timestamp or time.time())
         timef = timestamp.strftime("%H:%M:%S")
         line = self.format_row(task.uuid, task.name,

+ 19 - 5
celery/events/dumper.py

@@ -1,12 +1,26 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.events.dumper
+    ~~~~~~~~~~~~~~~~~~~~
+
+    This is a simple program that dumps events to the console
+    as they happen.  Think of it like a `tcpdump` for Celery events.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
 import sys

 from datetime import datetime

-from celery.app import app_or_default
-from celery.datastructures import LocalCache
+from ..app import app_or_default
+from ..datastructures import LRUCache


-TASK_NAMES = LocalCache(0xFFF)
+TASK_NAMES = LRUCache(limit=0xFFF)

 HUMAN_TYPES = {"worker-offline": "shutdown",
                "worker-online": "started",
@@ -23,12 +37,12 @@ def humanize_type(type):
 class Dumper(object):

     def on_event(self, event):
-        timestamp = datetime.fromtimestamp(event.pop("timestamp"))
+        timestamp = datetime.utcfromtimestamp(event.pop("timestamp"))
         type = event.pop("type").lower()
         hostname = event.pop("hostname")
         if type.startswith("task-"):
             uuid = event.pop("uuid")
-            if type.startswith("task-received"):
+            if type in ("task-received", "task-sent"):
                 task = TASK_NAMES[uuid] = "%s(%s) args=%s kwargs=%s" % (
                         event.pop("name"), uuid,
                         event.pop("args"),
                         event.pop("kwargs"))

+ 25 - 11
celery/events/snapshot.py

@@ -1,12 +1,29 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.events.snapshot
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Consuming the events as a stream is not always suitable
+    so this module implements a system to take snapshots of the
+    state of a cluster at regular intervals.  There is a full
+    implementation of this writing the snapshots to a database
+    in :mod:`djcelery.snapshots` in the `django-celery` distribution.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
 import atexit

-from celery import platforms
-from celery.app import app_or_default
-from celery.datastructures import TokenBucket
-from celery.utils import timer2
-from celery.utils import instantiate, LOG_LEVELS
-from celery.utils.dispatch import Signal
-from celery.utils.timeutils import rate
+from kombu.utils.limits import TokenBucket
+
+from .. import platforms
+from ..app import app_or_default
+from ..utils import timer2, instantiate, LOG_LEVELS
+from ..utils.dispatch import Signal
+from ..utils.timeutils import rate


 class Polaroid(object):
@@ -48,7 +65,7 @@ class Polaroid(object):

     def shutter(self):
         if self.maxrate is None or self.maxrate.can_consume():
-            self.logger.debug("Shutter: %s" % (self.state, ))
+            self.logger.debug("Shutter: %s", self.state)
             self.shutter_signal.send(self.state)
             self.on_shutter(self.state)

@@ -83,9 +100,6 @@ def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
     logger = app.log.setup_logger(loglevel=loglevel,
                                   logfile=logfile,
                                   name="celery.evcam")
-    if pidfile:
-        pidlock = platforms.create_pidlock(pidfile).acquire()
-        atexit.register(pidlock.release)

     logger.info(
         "-> evcam: Taking snapshots with %s (every %s secs.)\n" % (

+ 42 - 13
celery/events/state.py

@@ -1,13 +1,35 @@
-from __future__ import absolute_import, with_statement
+# -*- coding: utf-8 -*-
+"""
+    celery.events.state
+    ~~~~~~~~~~~~~~~~~~~
+
+    This module implements a datastructure used to keep
+    track of the state of a cluster of workers and the tasks
+    it is working on (by consuming events).
+
+    For every event consumed the state is updated,
+    so the state represents the state of the cluster
+    at the time of the last event.
+
+    Snapshots (:mod:`celery.events.snapshot`) can be used to
+    take "pictures" of this state at regular intervals
+    to e.g. store that in a database.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement

 import time
 import heapq

 from threading import Lock

-from celery import states
-from celery.datastructures import AttributeDict, LocalCache
-from celery.utils import kwdict
+from .. import states
+from ..datastructures import AttributeDict, LRUCache
+from ..utils import kwdict

 #: Heartbeat expiry time in seconds.  The worker will be considered offline
 #: if no heartbeat is received within this time.
@@ -172,8 +194,8 @@ class State(object):

     def __init__(self, callback=None,
             max_workers_in_memory=5000, max_tasks_in_memory=10000):
-        self.workers = LocalCache(max_workers_in_memory)
-        self.tasks = LocalCache(max_tasks_in_memory)
+        self.workers = LRUCache(limit=max_workers_in_memory)
+        self.tasks = LRUCache(limit=max_tasks_in_memory)
         self.event_callback = callback
         self.group_handlers = {"worker": self.worker_event,
                                "task": self.task_event}
@@ -194,9 +216,10 @@ class State(object):

     def _clear_tasks(self, ready=True):
         if ready:
-            self.tasks = dict((uuid, task)
-                                for uuid, task in self.tasks.items()
-                                    if task.state not in states.READY_STATES)
+            in_progress = dict((uuid, task) for uuid, task in self.itertasks()
+                                if task.state not in states.READY_STATES)
+            self.tasks.clear()
+            self.tasks.update(in_progress)
         else:
             self.tasks.clear()

@@ -264,13 +287,19 @@ class State(object):
         if self.event_callback:
             self.event_callback(self, event)

+    def itertasks(self, limit=None):
+        for index, row in enumerate(self.tasks.iteritems()):
+            yield row
+            if limit and index >= limit:
+                break
+
     def tasks_by_timestamp(self, limit=None):
         """Get tasks by timestamp.

         Returns a list of `(uuid, task)` tuples.

         """
-        return self._sort_tasks_by_time(self.tasks.items()[:limit])
+        return self._sort_tasks_by_time(self.itertasks(limit))

     def _sort_tasks_by_time(self, tasks):
         """Sort task items by time."""
@@ -284,7 +313,7 @@ class State(object):

         """
         return self._sort_tasks_by_time([(uuid, task)
-                for uuid, task in self.tasks.items()[:limit]
+                for uuid, task in self.itertasks(limit)
                     if task.name == name])

     def tasks_by_worker(self, hostname, limit=None):
@@ -294,12 +323,12 @@ class State(object):

         """
         return self._sort_tasks_by_time([(uuid, task)
-                for uuid, task in self.tasks.items()[:limit]
+                for uuid, task in self.itertasks(limit)
                     if task.worker.hostname == hostname])

     def task_types(self):
         """Returns a list of all seen task types."""
-        return list(sorted(set(task.name for task in self.tasks.values())))
+        return list(sorted(set(task.name for task in self.tasks.itervalues())))

     def alive_workers(self):
         """Returns a list of (seemingly) alive workers."""

+ 29 - 0
celery/exceptions.py

@@ -1,8 +1,29 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.exceptions
+    ~~~~~~~~~~~~~~~~~
+
+    This module contains Celery-specific exceptions.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
 UNREGISTERED_FMT = """\
 Task of kind %s is not registered, please make sure it's imported.\
 """


+class SecurityError(Exception):
+    """Security related exceptions.
+
+    Handle with care.
+
+    """
+
+
 class SystemTerminate(SystemExit):
     """Signals that the worker should terminate."""

@@ -61,3 +82,11 @@ class TaskRevokedError(Exception):

 class NotConfigured(UserWarning):
     """Celery has not been configured, as no config module has been found."""
+
+
+class CPendingDeprecationWarning(PendingDeprecationWarning):
+    pass
+
+
+class CDeprecationWarning(DeprecationWarning):
+    pass

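Editor's note: `CPendingDeprecationWarning` and `CDeprecationWarning` are plain subclasses of the stdlib warning categories, so the usual `warnings` filters apply to them; having celery-specific subclasses lets users target celery's deprecations without silencing anyone else's. For instance (illustrative only):

    import warnings

    class CDeprecationWarning(DeprecationWarning):
        pass

    # Silence only celery's deprecation warnings:
    warnings.simplefilter("ignore", CDeprecationWarning)

    # ...or escalate them to errors during test runs instead:
    # warnings.simplefilter("error", CDeprecationWarning)
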
+ 4 - 3
celery/execute/__init__.py

@@ -1,7 +1,8 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import

-from celery import current_app
-from celery.utils import deprecated
+from .. import current_app
+from ..utils import deprecated

 send_task = current_app.send_task

@@ -21,5 +22,5 @@ def apply(task, *args, **kwargs):
 @deprecated(removal="2.3",
             alternative="Use registry.tasks[name].delay instead.")
 def delay_task(task, *args, **kwargs):
-    from celery.registry import tasks
+    from ..registry import tasks
     return tasks[task].apply_async(args, kwargs)

+ 20 - 8
celery/execute/trace.py

@@ -1,13 +1,24 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.execute.trace
+    ~~~~~~~~~~~~~~~~~~~~
+
+    This module defines how the task execution is traced:
+    errors are recorded, handlers are applied and so on.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import

 import sys
 import traceback

-from celery import states
-from celery import signals
-from celery.registry import tasks
-from celery.exceptions import RetryTaskError
-from celery.datastructures import ExceptionInfo
+from .. import states, signals
+from ..datastructures import ExceptionInfo
+from ..exceptions import RetryTaskError
+from ..registry import tasks


 class TraceInfo(object):
@@ -72,7 +83,7 @@ class TaskTrace(object):

     def execute(self):
         self.task.request.update(self.request, args=self.args,
-                                               kwargs=self.kwargs)
+                                 called_directly=False, kwargs=self.kwargs)
         signals.task_prerun.send(sender=self.task, task_id=self.task_id,
                                  task=self.task, args=self.args,
                                  kwargs=self.kwargs)
@@ -92,13 +103,14 @@ class TaskTrace(object):
         handler = self._trace_handlers[trace.status]
         r = handler(trace.retval, trace.exc_type, trace.tb, trace.strtb)
         self.handle_after_return(trace.status, trace.retval,
-                                 trace.exc_type, trace.tb, trace.strtb)
+                                 trace.exc_type, trace.tb, trace.strtb,
+                                 einfo=trace.exc_info)
         return r

     def handle_after_return(self, status, retval, type_, tb, strtb,
             einfo=None):
         if status in states.EXCEPTION_STATES:
-            einfo = ExceptionInfo((retval, type_, tb))
+            einfo = ExceptionInfo(einfo)
         self.task.after_return(status, retval, self.task_id,
                                self.args, self.kwargs, einfo)

+ 18 - 13
celery/loaders/__init__.py

@@ -1,9 +1,19 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
+"""
+    celery.loaders
+    ~~~~~~~~~~~~~~
+
+    Loaders define how configuration is read, what happens
+    when workers start, when tasks are executed and so on.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.

-import os
+"""
+from __future__ import absolute_import

-from celery import current_app
-from celery.utils import get_cls_by_name
+from .. import current_app
+from ..utils import deprecated, get_cls_by_name

 LOADER_ALIASES = {"app": "celery.loaders.app.AppLoader",
                   "default": "celery.loaders.default.Loader",
@@ -15,18 +25,13 @@ def get_loader_cls(loader):
     return get_cls_by_name(loader, LOADER_ALIASES)


-def setup_loader():
-    # XXX Deprecate
-    return get_loader_cls(os.environ.setdefault("CELERY_LOADER", "default"))()
-
-
+@deprecated(deprecation="2.5", removal="3.0",
+        alternative="celery.current_app.loader")
 def current_loader():
-    """Detect and return the current loader."""
-    # XXX Deprecate
     return current_app.loader


+@deprecated(deprecation="2.5", removal="3.0",
+            alternative="celery.current_app.conf")
 def load_settings():
-    """Load the global settings object."""
-    # XXX Deprecate
     return current_app.conf

+ 12 - 1
celery/loaders/app.py

@@ -1,6 +1,17 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.loaders.app
+    ~~~~~~~~~~~~~~~~~~
+
+    The default loader used with custom app instances.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import

-from celery.loaders.base import BaseLoader
+from .base import BaseLoader


 class AppLoader(BaseLoader):

+ 35 - 14
celery/loaders/base.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.loaders.base
+    ~~~~~~~~~~~~~~~~~~~
+
+    Loader base class.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import

 import importlib
@@ -6,12 +17,12 @@ import re
 import warnings

 from anyjson import deserialize
-from kombu.utils import cached_property

-from celery.datastructures import DictAttribute
-from celery.exceptions import ImproperlyConfigured
-from celery.utils import get_cls_by_name
-from celery.utils import import_from_cwd as _import_from_cwd
+from ..datastructures import DictAttribute
+from ..exceptions import ImproperlyConfigured
+from ..utils import (cached_property, get_cls_by_name,
+                     import_from_cwd as _import_from_cwd)
+from ..utils.functional import maybe_list

 BUILTIN_MODULES = frozenset(["celery.task"])

@@ -47,7 +58,7 @@ class BaseLoader(object):
     _conf = None

     def __init__(self, app=None, **kwargs):
-        from celery.app import app_or_default
+        from ..app import app_or_default
         self.app = app_or_default(app)

     def on_task_init(self, task_id, task):
@@ -63,18 +74,23 @@ class BaseLoader(object):
         starts."""
         pass

+    def on_worker_process_init(self):
+        """This method is called when a child process starts."""
+        pass
+
     def import_task_module(self, module):
         return self.import_from_cwd(module)

-    def import_module(self, module):
-        return importlib.import_module(module)
+    def import_module(self, module, package=None):
+        return importlib.import_module(module, package=package)

-    def import_from_cwd(self, module, imp=None):
+    def import_from_cwd(self, module, imp=None, package=None):
         return _import_from_cwd(module,
-                self.import_module if imp is None else imp)
+                self.import_module if imp is None else imp,
+                package=package)

     def import_default_modules(self):
-        imports = set(list(self.conf.get("CELERY_IMPORTS") or ()))
+        imports = set(maybe_list(self.conf.get("CELERY_IMPORTS") or ()))
         return [self.import_task_module(module)
                     for module in imports | self.builtin_modules]

@@ -83,6 +99,9 @@ class BaseLoader(object):
             self.worker_initialized = True
             self.on_worker_init()

+    def init_worker_process(self):
+        self.on_worker_process_init()
+
     def config_from_envvar(self, variable_name, silent=False):
         module_name = os.environ.get(variable_name)
         if not module_name:
@@ -113,7 +132,7 @@ class BaseLoader(object):
                 override_types={"tuple": "json",
                                 "list": "json",
                                 "dict": "json"}):
-        from celery.app.defaults import Option, NAMESPACES
+        from ..app.defaults import Option, NAMESPACES
         namespace = namespace.upper()
         typemap = dict(Option.typemap, **extra_types)

@@ -155,13 +174,15 @@ class BaseLoader(object):

     def mail_admins(self, subject, body, fail_silently=False,
             sender=None, to=None, host=None, port=None,
-            user=None, password=None, timeout=None):
+            user=None, password=None, timeout=None,
+            use_ssl=False, use_tls=False):
         try:
             message = self.mail.Message(sender=sender, to=to,
                                         subject=subject, body=body)
             mailer = self.mail.Mailer(host=host, port=port,
                                       user=user, password=password,
-                                      timeout=timeout)
+                                      timeout=timeout, use_ssl=use_ssl,
+                                      use_tls=use_tls)
             mailer.send(message)
         except Exception, exc:
             if not fail_silently:

+ 16 - 4
celery/loaders/default.py

@@ -1,12 +1,24 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.loaders.default
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    The default loader used when no custom app has been initialized.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import

 import os
 import warnings

-from celery.datastructures import AttributeDict
-from celery.exceptions import NotConfigured
-from celery.loaders.base import BaseLoader
-from celery.utils import find_module
+from ..datastructures import AttributeDict
+from ..exceptions import NotConfigured
+from ..utils import find_module
+
+from .base import BaseLoader

 DEFAULT_CONFIG_MODULE = "celeryconfig"


+ 23 - 3
celery/local.py

@@ -1,4 +1,22 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.local
+    ~~~~~~~~~~~~
+
+    This module contains critical utilities that
+    need to be loaded as soon as possible, and that
+    shall not load any third party modules.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
+
 def try_import(module):
+    """Try to import and return module, or return
+    None if the module does not exist."""
     from importlib import import_module
     try:
         return import_module(module)
@@ -6,12 +24,14 @@ def try_import(module):
         pass


-class LocalProxy(object):
-    """Code stolen from werkzeug.local.LocalProxy."""
+class Proxy(object):
+    """Proxy to another object."""
+
+    # Code stolen from werkzeug.local.Proxy.
     __slots__ = ('__local', '__dict__', '__name__')

     def __init__(self, local, name=None):
-        object.__setattr__(self, '_LocalProxy__local', local)
+        object.__setattr__(self, '_Proxy__local', local)
         object.__setattr__(self, '__name__', name)

     def _get_current_object(self):

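Editor's note: the rename from `LocalProxy` to `Proxy` matters for the `log.py` and `messaging.py` hunks below, which wrap module-level attributes in `Proxy(lambda: ...)` so that the current app is looked up on each access instead of once at import time. The core of the pattern, as a reduced sketch (the real class forwards many more special methods):

    class Proxy(object):
        """Forward attribute access to whatever a factory returns."""

        def __init__(self, local):
            object.__setattr__(self, '_Proxy__local', local)

        def _get_current_object(self):
            return self.__local()   # call the factory on every access

        def __getattr__(self, name):
            return getattr(self._get_current_object(), name)

    cfg = {"broker": "amqp://"}
    proxy = Proxy(lambda: cfg)
    assert proxy.keys is not None   # attribute access hits the real dict
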
+ 50 - 34
celery/log.py

@@ -1,4 +1,4 @@
-"""celery.log"""
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import

 import logging
@@ -12,14 +12,16 @@
 except ImportError:
     current_process = mputil = None  # noqa

-from celery import signals
-from celery import current_app
-from celery.utils import LOG_LEVELS, isatty
-from celery.utils.compat import LoggerAdapter
-from celery.utils.compat import WatchedFileHandler
-from celery.utils.encoding import safe_str
-from celery.utils.patch import ensure_process_aware_logger
-from celery.utils.term import colored
+from . import current_app
+from . import signals
+from .local import Proxy
+from .utils import LOG_LEVELS, isatty
+from .utils.compat import LoggerAdapter, WatchedFileHandler
+from .utils.encoding import safe_str, str_t
+from .utils.patch import ensure_process_aware_logger
+from .utils.term import colored
+
+is_py3k = sys.version_info >= (3, 0)


 class ColorFormatter(logging.Formatter):
@@ -34,8 +36,8 @@ class ColorFormatter(logging.Formatter):

     def formatException(self, ei):
         r = logging.Formatter.formatException(self, ei)
-        if isinstance(r, str):
-            return r.decode("utf-8", "replace")    # Convert to unicode
+        if isinstance(r, str) and not is_py3k:
+            return safe_str(r)
         return r

     def format(self, record):
@@ -44,21 +46,21 @@ class ColorFormatter(logging.Formatter):

         if self.use_color and color:
             try:
-                record.msg = color(safe_str(record.msg))
-            except Exception:
+                record.msg = safe_str(str_t(color(record.msg)))
+            except Exception, exc:
                 record.msg = "<Unrepresentable %r: %r>" % (
-                        type(record.msg), traceback.format_stack())
+                        type(record.msg), exc)
+                record.exc_info = sys.exc_info()

-        # Very ugly, but have to make sure processName is supported
-        # by foreign logger instances.
-        # (processName is always supported by Python 2.7)
-        if "processName" not in record.__dict__:
-            process_name = current_process and current_process()._name or ""
-            record.__dict__["processName"] = process_name
-        t = logging.Formatter.format(self, record)
-        if isinstance(t, unicode):
-            return t.encode("utf-8", "replace")
-        return t
+        if not is_py3k:
+            # Very ugly, but have to make sure processName is supported
+            # by foreign logger instances.
+            # (processName is always supported by Python 2.7)
+            if "processName" not in record.__dict__:
+                process_name = (current_process and
+                                current_process()._name or "")
+                record.__dict__["processName"] = process_name
+        return safe_str(logging.Formatter.format(self, record))


 class Logging(object):
@@ -104,7 +106,8 @@ class Logging(object):

         if mputil and hasattr(mputil, "_logger"):
             mputil._logger = None
-        ensure_process_aware_logger()
+        if not is_py3k:
+            ensure_process_aware_logger()
         receivers = signals.setup_logging.send(sender=None,
                         loglevel=loglevel, logfile=logfile,
                         format=format, colorize=colorize)
@@ -118,7 +121,11 @@ class Logging(object):
             for logger in filter(None, (root, mp)):
                 self._setup_logger(logger, logfile, format, colorize, **kwargs)
                 logger.setLevel(loglevel)
+                signals.after_setup_logger.send(sender=None, logger=logger,
+                                        loglevel=loglevel, logfile=logfile,
+                                        format=format, colorize=colorize)
         Logging._setup = True
+
         return receivers

     def _detect_handler(self, logfile=None):
@@ -181,10 +188,14 @@ class Logging(object):
                                     logfile, format, colorize, **kwargs)
         logger.propagate = int(propagate)    # this is an int for some reason.
                                              # better to not question why.
+        signals.after_setup_task_logger.send(sender=None, logger=logger,
+                                     loglevel=loglevel, logfile=logfile,
+                                     format=format, colorize=colorize)
         return LoggerAdapter(logger, {"task_id": task_id,
                                       "task_name": task_name})

-    def redirect_stdouts_to_logger(self, logger, loglevel=None):
+    def redirect_stdouts_to_logger(self, logger, loglevel=None,
+            stdout=True, stderr=True):
         """Redirect :class:`sys.stdout` and :class:`sys.stderr` to a
         logging instance.

@@ -193,7 +204,10 @@ class Logging(object):

         """
         proxy = LoggingProxy(logger, loglevel)
-        sys.stdout = sys.stderr = proxy
+        if stdout:
+            sys.stdout = proxy
+        if stderr:
+            sys.stderr = proxy
         return proxy

     def _setup_logger(self, logger, logfile, format, colorize,
@@ -208,12 +222,14 @@ class Logging(object):
         return logger


-setup_logging_subsystem = current_app.log.setup_logging_subsystem
-get_default_logger = current_app.log.get_default_logger
-setup_logger = current_app.log.setup_logger
-setup_task_logger = current_app.log.setup_task_logger
-get_task_logger = current_app.log.get_task_logger
-redirect_stdouts_to_logger = current_app.log.redirect_stdouts_to_logger
+get_default_logger = Proxy(lambda: current_app.log.get_default_logger)
+setup_logger = Proxy(lambda: current_app.log.setup_logger)
+setup_task_logger = Proxy(lambda: current_app.log.setup_task_logger)
+get_task_logger = Proxy(lambda: current_app.log.get_task_logger)
+setup_logging_subsystem = Proxy(
+            lambda: current_app.log.setup_logging_subsystem)
+redirect_stdouts_to_logger = Proxy(
+            lambda: current_app.log.redirect_stdouts_to_logger)


 class LoggingProxy(object):
@@ -271,7 +287,7 @@ class LoggingProxy(object):
         if data and not self.closed:
             self._thread.recurse_protection = True
             try:
-                self.logger.log(self.loglevel, data)
+                self.logger.log(self.loglevel, safe_str(data))
             finally:
                 self._thread.recurse_protection = False


+ 12 - 7
celery/messaging.py

@@ -1,8 +1,13 @@
-from celery import current_app
+# -*- coding: utf-8 -*-
+"""This module is deprecated, use ``current_app.amqp`` instead."""
+from __future__ import absolute_import

-TaskPublisher = current_app.amqp.TaskPublisher
-ConsumerSet = current_app.amqp.ConsumerSet
-TaskConsumer = current_app.amqp.TaskConsumer
-establish_connection = current_app.broker_connection
-with_connection = current_app.with_default_connection
-get_consumer_set = current_app.amqp.get_task_consumer
+from . import current_app
+from .local import Proxy
+
+TaskPublisher = Proxy(lambda: current_app.amqp.TaskPublisher)
+ConsumerSet = Proxy(lambda: current_app.amqp.ConsumerSet)
+TaskConsumer = Proxy(lambda: current_app.amqp.TaskConsumer)
+establish_connection = Proxy(lambda: current_app.broker_connection)
+with_connection = Proxy(lambda: current_app.with_default_connection)
+get_consumer_set = Proxy(lambda: current_app.amqp.get_task_consumer)

+ 249 - 35
celery/platforms.py

@@ -1,27 +1,67 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.platforms
+    ~~~~~~~~~~~~~~~~
+
+    Utilities dealing with platform specifics: signals, daemonization,
+    users, groups, and so on.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
from __future__ import absolute_import

-import os
-import sys
import errno
+import os
+import platform as _platform
+import shlex
import signal as _signal
+import sys

-from celery.local import try_import
+from .local import try_import

_setproctitle = try_import("setproctitle")
resource = try_import("resource")
pwd = try_import("pwd")
grp = try_import("grp")

+SYSTEM = _platform.system()
+IS_OSX = SYSTEM == "Darwin"
+IS_WINDOWS = SYSTEM == "Windows"
+
DAEMON_UMASK = 0
DAEMON_WORKDIR = "/"
DAEMON_REDIRECT_TO = getattr(os, "devnull", "/dev/null")


+def pyimplementation():
+    if hasattr(_platform, "python_implementation"):
+        return _platform.python_implementation()
+    elif sys.platform.startswith("java"):
+        return "Jython %s" % (sys.platform, )
+    elif hasattr(sys, "pypy_version_info"):
+        v = ".".join(map(str, sys.pypy_version_info[:3]))
+        if sys.pypy_version_info[3:]:
+            v += "-" + "".join(map(str, sys.pypy_version_info[3:]))
+        return "PyPy %s" % (v, )
+    else:
+        return "CPython"
+
+
class LockFailed(Exception):
+    """Raised if a pidlock can't be acquired."""
    pass


def get_fdmax(default=None):
+    """Returns the maximum number of open file descriptors
+    on this system.
+
+    :keyword default: Value returned if there's no file
+                      descriptor limit.
+
+    """
    fdmax = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
    if fdmax == resource.RLIM_INFINITY:
        return default
@@ -29,22 +69,23 @@ def get_fdmax(default=None):


class PIDFile(object):
+    """PID lock file.
+
+    This is the type returned by :func:`create_pidlock`.
+
+    **Should not be used directly, use the :func:`create_pidlock`
+    context instead**
+
+    """
+
+    #: Path to the pid lock file.
+    path = None

    def __init__(self, path):
        self.path = os.path.abspath(path)

-    def write_pid(self):
-        open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
-        open_mode = (((os.R_OK | os.W_OK) << 6) |
-                        ((os.R_OK) << 3) |
-                        ((os.R_OK)))
-        pidfile_fd = os.open(self.path, open_flags, open_mode)
-        pidfile = os.fdopen(pidfile_fd, "w")
-        pid = os.getpid()
-        pidfile.write("%d\n" % (pid, ))
-        pidfile.close()
-
    def acquire(self):
+        """Acquire lock."""
        try:
            self.write_pid()
        except OSError, exc:
@@ -53,13 +94,16 @@ class PIDFile(object):
    __enter__ = acquire

    def is_locked(self):
+        """Returns true if the pid lock exists."""
        return os.path.exists(self.path)

    def release(self, *args):
+        """Release lock."""
        self.remove()
    __exit__ = release

    def read_pid(self):
+        """Reads and returns the current pid."""
        try:
            fh = open(self.path, "r")
        except IOError, exc:
@@ -76,6 +120,7 @@ class PIDFile(object):
            raise ValueError("PID file %r contents invalid." % self.path)

    def remove(self):
+        """Removes the lock."""
        try:
            os.unlink(self.path)
        except OSError, exc:
@@ -84,6 +129,8 @@ class PIDFile(object):
            raise

    def remove_if_stale(self):
+        """Removes the lock if the process is not running.
+        (does not respond to signals)."""
        try:
            pid = self.read_pid()
        except ValueError, exc:
@@ -103,13 +150,39 @@ class PIDFile(object):
                return True
        return False

+    def write_pid(self):
+        open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+        open_mode = (((os.R_OK | os.W_OK) << 6) |
+                        ((os.R_OK) << 3) |
+                        ((os.R_OK)))
+        pidfile_fd = os.open(self.path, open_flags, open_mode)
+        pidfile = os.fdopen(pidfile_fd, "w")
+        try:
+            pid = os.getpid()
+            pidfile.write("%d\n" % (pid, ))
+        finally:
+            pidfile.close()
+
 
 
def create_pidlock(pidfile):
-    """Create and verify pidfile.
+    """Create and verify pid file.
+
+    If the pid file already exists the program exits with an error message,
+    however if the process it refers to is not running anymore, the pid file
+    is deleted and the program continues.
+
+    The caller is responsible for releasing the lock before the program
+    exits.

-    If the pidfile already exists the program exits with an error message,
-    however if the process it refers to is not running anymore, the pidfile
-    is just deleted.
+    :returns: :class:`PIDFile`.
+
+    **Example**:
+
+    .. code-block:: python
+
+        import atexit
+        pidlock = create_pidlock("/var/run/app.pid").acquire()
+        atexit.register(pidlock.release)

    """

@@ -124,11 +197,13 @@ def create_pidlock(pidfile):

class DaemonContext(object):
    _is_open = False
+    workdir = DAEMON_WORKDIR
+    umask = DAEMON_UMASK

-    def __init__(self, pidfile=None, workdir=DAEMON_WORKDIR,
-            umask=DAEMON_UMASK, **kwargs):
-        self.workdir = workdir
-        self.umask = umask
+    def __init__(self, pidfile=None, workdir=None,
+            umask=None, **kwargs):
+        self.workdir = workdir or self.workdir
+        self.umask = self.umask if umask is None else umask

    def open(self):
        if not self._is_open:
@@ -168,12 +243,49 @@ class DaemonContext(object):


def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
             workdir=None, **opts):
+    """Detach the current process in the background (daemonize).
+
+    :keyword logfile: Optional log file.  The ability to write to this file
+       will be verified before the process is detached.
+    :keyword pidfile: Optional pid file.  The pid file will not be created,
+      as this is the responsibility of the child.  But the process will
+      exit if the pid lock exists and the pid written is still running.
+    :keyword uid: Optional user id or user name to change
+      effective privileges to.
+    :keyword gid: Optional group id or group name to change effective
+      privileges to.
+    :keyword umask: Optional umask that will be effective in the child process.
+    :keyword workdir: Optional new working directory.
+    :keyword \*\*opts: Ignored.
+
+    **Example**:
+
+    .. code-block:: python
+
+        import atexit
+        from celery.platforms import detached, create_pidlock
+
+        with detached(logfile="/var/log/app.log", pidfile="/var/run/app.pid",
+                      uid="nobody"):
+            # Now in detached child process with effective user set to nobody,
+            # and we know that our logfile can be written to, and that
+            # the pidfile is not locked.
+            pidlock = create_pidlock("/var/run/app.pid").acquire()
+            atexit.register(pidlock.release)
+
+            # Run the program
+            program.run(logfile="/var/log/app.log")
+
+    """
+
    if not resource:
        raise RuntimeError("This platform does not support detach.")
    workdir = os.getcwd() if workdir is None else workdir

    signals.reset("SIGCLD")  # Make sure SIGCLD is using the default handler.
-    set_effective_user(uid=uid, gid=gid)
+    if not os.geteuid():
+        # no point trying to setuid unless we're root.
+        maybe_drop_privileges(uid=uid, gid=gid)

    # Since without stderr any errors will be silently suppressed,
    # we need to know that we have access to the logfile.
     # we need to know that we have access to the logfile.
     # we need to know that we have access to the logfile.
@@ -187,7 +299,7 @@ def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
def parse_uid(uid):
    """Parse user id.

-    uid can be an interger (uid) or a string (username), if a username
+    uid can be an integer (uid) or a string (user name), if a user name
    the uid is taken from the password file.

    """
@@ -220,45 +332,139 @@ def parse_gid(gid):
        raise


+def _setgroups_hack(groups):
+    """:func:`setgroups` may have a platform-dependent limit,
+    and it is not always possible to know in advance what this limit
+    is, so we use this ugly hack stolen from glibc."""
+    groups = groups[:]
+
+    while 1:
+        try:
+            return os.setgroups(groups)
+        except ValueError:   # error from Python's check.
+            if len(groups) <= 1:
+                raise
+            groups[:] = groups[:-1]
+        except OSError, exc:  # error from the OS.
+            if exc.errno != errno.EINVAL or len(groups) <= 1:
+                raise
+            groups[:] = groups[:-1]
+
+
+def setgroups(groups):
+    max_groups = None
+    try:
+        max_groups = os.sysconf("SC_NGROUPS_MAX")
+    except:
+        pass
+    try:
+        return _setgroups_hack(groups[:max_groups])
+    except OSError, exc:
+        if exc.errno != errno.EPERM:
+            raise
+        if any(group not in groups for group in os.getgroups()):
+            # we shouldn't be allowed to change to this group.
+            raise
+
+
+def initgroups(uid, gid):
+    if grp and pwd:
+        username = pwd.getpwuid(uid)[0]
+        if hasattr(os, "initgroups"):  # Python 2.7+
+            return os.initgroups(username, gid)
+        groups = [gr.gr_gid for gr in grp.getgrall()
+                                if username in gr.gr_mem]
+        setgroups(groups)
+
+
def setegid(gid):
    """Set effective group id."""
    gid = parse_gid(gid)
-    if gid != os.getgid():
+    if gid != os.getegid():
        os.setegid(gid)


def seteuid(uid):
    """Set effective user id."""
    uid = parse_uid(uid)
-    if uid != os.getuid():
+    if uid != os.geteuid():
        os.seteuid(uid)


-def set_effective_user(uid=None, gid=None):
+def setgid(gid):
+    os.setgid(parse_gid(gid))
+
+
+def setuid(uid):
+    os.setuid(parse_uid(uid))
+
+
+def maybe_drop_privileges(uid=None, gid=None):
    """Change process privileges to new user/group.

-    If uid and gid is set the effective user/group is set.
+    If UID and GID is specified, the real user/group is changed.

-    If only uid is set, the effective uer is set, and the group is
-    set to the users primary group.
+    If only UID is specified, the real user is changed, and the group is
+    changed to the users primary group.

-    If only gid is set, the effective group is set.
+    If only GID is specified, only the group is changed.

    """
    uid = uid and parse_uid(uid)
    gid = gid and parse_gid(gid)

    if uid:
-        # If gid isn't defined, get the primary gid of the uer.
+        # If GID isn't defined, get the primary GID of the user.
        if not gid and pwd:
            gid = pwd.getpwuid(uid).pw_gid
-        setegid(gid)
-        seteuid(uid)
+        # Must set the GID before initgroups(), as setgid()
+        # is known to zap the group list on some platforms.
+        setgid(gid)
+        initgroups(uid, gid)
+
+        # at last:
+        setuid(uid)
    else:
-        gid and setegid(gid)
+        gid and setgid(gid)


class Signals(object):
+    """Convenience interface to :mod:`signals`.
+
+    If the requested signal is not supported on the current platform,
+    the operation will be ignored.
+
+    **Examples**:
+
+    .. code-block:: python
+
+        >>> from celery.platforms import signals
+
+        >>> signals["INT"] = my_handler
+
+        >>> signals["INT"]
+        my_handler
+
+        >>> signals.supported("INT")
+        True
+
+        >>> signals.signum("INT")
+        2
+
+        >>> signals.ignore("USR1")
+        >>> signals["USR1"] == signals.ignored
+        True
+
+        >>> signals.reset("USR1")
+        >>> signals["USR1"] == signals.default
+        True
+
+        >>> signals.update(INT=exit_handler,
+        ...                TERM=exit_handler,
+        ...                HUP=hup_handler)
+
+    """
+
    ignored = _signal.SIG_IGN
    default = _signal.SIG_DFL

@@ -361,3 +567,11 @@ def set_mp_process_title(progname, info=None, hostname=None):
    else:
        return set_process_title("%s:%s" % (progname,
                                            current_process().name), info=info)
+
+
+def shellsplit(s, posix=not IS_WINDOWS):
+    # The posix= option to shlex.split is first available in Python 2.6+,
+    # so we use the shlex class directly and honour the posix flag here.
+    lexer = shlex.shlex(s, posix=posix)
+    lexer.whitespace_split = True
+    lexer.commenters = ''
+    return list(lexer)
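A quick sketch of the new helper on a POSIX system (the command line is illustrative):

.. code-block:: python

    from celery.platforms import shellsplit

    # Quoting is honoured, and '#' is not treated as a comment:
    assert shellsplit("celeryd -l 'info debug'") == \
            ["celeryd", "-l", "info debug"]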

+ 12 - 2
celery/registry.py

@@ -1,9 +1,19 @@
-"""celery.registry"""
+# -*- coding: utf-8 -*-
+"""
+    celery.registry
+    ~~~~~~~~~~~~~~~
+
+    Registry of available tasks.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
from __future__ import absolute_import

import inspect

-from celery.exceptions import NotRegistered
+from .exceptions import NotRegistered


class TaskRegistry(dict):
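`TaskRegistry` subclasses `dict`, so registered tasks are ordinary mapping items. A hedged sketch of the assumed behaviour:

.. code-block:: python

    from celery.registry import TaskRegistry

    registry = TaskRegistry()
    # registry.register(task) stores the instance under task.name;
    # lookups are plain dict access:
    #     registry["tasks.add"]     -> task instance
    #     registry["missing.name"]  -> raises NotRegistered (assumed)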

+ 38 - 33
celery/result.py

@@ -1,26 +1,39 @@
-from __future__ import absolute_import, with_statement
+# -*- coding: utf-8 -*-
+"""
+    celery.result
+    ~~~~~~~~~~~~~
+
+    Task results/state and groups of results.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement

import time

from copy import copy
from itertools import imap

-from celery import current_app
-from celery import states
-from celery.app import app_or_default
-from celery.exceptions import TimeoutError
-from celery.registry import _unpickle_task
+from . import current_app
+from . import states
+from .app import app_or_default
+from .exceptions import TimeoutError
+from .registry import _unpickle_task
+from .utils.compat import OrderedDict


def _unpickle_result(task_id, task_name):
    return _unpickle_task(task_name).AsyncResult(task_id)


-class BaseAsyncResult(object):
-    """Base class for pending result, supports custom task result backend.
+class AsyncResult(object):
+    """Query task state.
 
 
     :param task_id: see :attr:`task_id`.
     :param task_id: see :attr:`task_id`.
-    :param backend: see :attr:`backend`.
+    :keyword backend: see :attr:`backend`.
 
 
     """
     """
 
 
@@ -33,10 +46,10 @@ class BaseAsyncResult(object):
    #: The task result backend to use.
    backend = None

-    def __init__(self, task_id, backend, task_name=None, app=None):
+    def __init__(self, task_id, backend=None, task_name=None, app=None):
        self.app = app_or_default(app)
        self.task_id = task_id
-        self.backend = backend
+        self.backend = backend or self.app.backend
        self.task_name = task_name

    def forget(self):
@@ -181,23 +194,7 @@ class BaseAsyncResult(object):
    def status(self):
        """Deprecated alias of :attr:`state`."""
        return self.state
-
-
-class AsyncResult(BaseAsyncResult):
-    """Pending task result using the default backend.
-
-    :param task_id: The task uuid.
-
-    """
-
-    #: Task result store backend to use.
-    backend = None
-
-    def __init__(self, task_id, backend=None, task_name=None, app=None):
-        app = app_or_default(app)
-        backend = backend or app.backend
-        super(AsyncResult, self).__init__(task_id, backend,
-                                          task_name=task_name, app=app)
+BaseAsyncResult = AsyncResult  # for backwards compatibility.


class ResultSet(object):
@@ -326,15 +323,19 @@ class ResultSet(object):

        """
        elapsed = 0.0
-        results = dict((result.task_id, copy(result))
-                            for result in self.results)
+        results = OrderedDict((result.task_id, copy(result))
+                                for result in self.results)

        while results:
            removed = set()
            for task_id, result in results.iteritems():
-                yield result.get(timeout=timeout and timeout - elapsed,
-                                 propagate=propagate, interval=0.0)
-                removed.add(task_id)
+                if result.ready():
+                    yield result.get(timeout=timeout and timeout - elapsed,
+                                     propagate=propagate)
+                    removed.add(task_id)
+                else:
+                    if result.backend.subpolling_interval:
+                        time.sleep(result.backend.subpolling_interval)
            for task_id in removed:
                results.pop(task_id, None)
            time.sleep(interval)
@@ -434,6 +435,10 @@ class ResultSet(object):
        """Deprecated alias to :attr:`results`."""
        return self.results

+    @property
+    def supports_native_join(self):
+        return self.results[0].backend.supports_native_join
+

class TaskSetResult(ResultSet):
    """An instance of this class is returned by
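Since the old `BaseAsyncResult` is now just an alias, a result can be built from a task id alone and the app's configured backend is used. A sketch (the id is made up, and the default app is assumed):

.. code-block:: python

    from celery.result import AsyncResult, BaseAsyncResult

    res = AsyncResult("2dcb8f45-6c75-4c13-a4bb-fd6ae0a74de2")
    assert res.backend is res.app.backend   # falls back to the app backend
    assert BaseAsyncResult is AsyncResult   # kept for backwards compatibility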

+ 15 - 3
celery/routes.py

@@ -1,7 +1,19 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.routes
+    ~~~~~~~~~~~~~
+
+    Contains utilities for working with task routes
+    (:setting:`CELERY_ROUTES`).
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
from __future__ import absolute_import

-from celery.exceptions import QueueNotFound
-from celery.utils import firstmethod, instantiate, lpmerge, mpromise
+from .exceptions import QueueNotFound
+from .utils import firstmethod, instantiate, lpmerge, mpromise

_first_route = firstmethod("route_for_task")

@@ -22,7 +34,7 @@ class Router(object):

    def __init__(self, routes=None, queues=None, create_missing=False,
            app=None):
-        from celery.app import app_or_default
+        from .app import app_or_default
        self.app = app_or_default(app)
        self.queues = {} if queues is None else queues
        self.routes = [] if routes is None else routes
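The router tries each entry of :setting:`CELERY_ROUTES` in order; an entry can be a mapping or any object with a `route_for_task` method (that is the interface `firstmethod("route_for_task")` above dispatches to). A hedged configuration sketch:

.. code-block:: python

    CELERY_ROUTES = (
        {"feed.tasks.import_feed": {"queue": "feeds"}},  # mapping route
    )

    class MyRouter(object):                              # class-based route
        def route_for_task(self, task, args=None, kwargs=None):
            if task == "feed.tasks.import_feed":
                return {"queue": "feeds"}
            return None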

+ 78 - 63
celery/schedules.py

@@ -1,13 +1,29 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.schedules
+    ~~~~~~~~~~~~~~~~
+
+    Schedules define the intervals at which periodic tasks
+    should run.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
from __future__ import absolute_import

+import re
+
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
-from pyparsing import (Word, Literal, ZeroOrMore, Optional,
-                       Group, StringEnd, alphas)

-from celery.utils import is_iterable
-from celery.utils.timeutils import (timedelta_seconds, weekday,
-                                    remaining, humanize_seconds)
+from .utils import is_iterable
+from .utils.timeutils import (timedelta_seconds, weekday,
+                              remaining, humanize_seconds)
+
+
+class ParseException(Exception):
+    """Raised by crontab_parser when the input can't be parsed."""


class schedule(object):
@@ -50,8 +66,8 @@ class schedule(object):
        return False, rem

    def __repr__(self):
-        return "<freq: %s>" % humanize_seconds(
-                timedelta_seconds(self.run_every))
+        return "<freq: %s>" % (
+                    humanize_seconds(timedelta_seconds(self.run_every)), )

    def __eq__(self, other):
        if isinstance(other, schedule):
@@ -85,70 +101,69 @@ class crontab_parser(object):
        [0, 1, 2, 3, 4, 5, 6]

    """
+    ParseException = ParseException

-    def __init__(self, max_=60):
-        # define the grammar structure
-        digits = "0123456789"
-        star = Literal('*')
-        number = Word(digits) | Word(alphas)
-        steps = number
-        range_ = number + Optional(Literal('-') + number)
-        numspec = star | range_
-        expr = Group(numspec) + Optional(Literal('/') + steps)
-        extra_groups = ZeroOrMore(Literal(',') + expr)
-        groups = expr + extra_groups + StringEnd()
-
-        # define parse actions
-        star.setParseAction(self._expand_star)
-        number.setParseAction(self._expand_number)
-        range_.setParseAction(self._expand_range)
-        expr.setParseAction(self._filter_steps)
-        extra_groups.setParseAction(self._ignore_comma)
-        groups.setParseAction(self._join_to_set)
+    _range = r'(\w+?)-(\w+)'
+    _steps = r'/(\w+)?'
+    _star = r'\*'

+    def __init__(self, max_=60):
        self.max_ = max_
-        self.parser = groups
-
-    @staticmethod
-    def _expand_number(toks):
-        try:
-            i = int(toks[0])
-        except ValueError:
-            try:
-                i = weekday(toks[0])
-            except KeyError:
-                raise ValueError("Invalid weekday literal '%s'." % toks[0])
-        return [i]
-
-    @staticmethod
-    def _expand_range(toks):
+        self.pats = (
+                (re.compile(self._range + self._steps), self._range_steps),
+                (re.compile(self._range), self._expand_range),
+                (re.compile(self._star + self._steps), self._star_steps),
+                (re.compile('^' + self._star + '$'), self._expand_star))
+
+    def parse(self, spec):
+        acc = set()
+        for part in spec.split(','):
+            if not part:
+                raise self.ParseException("empty part")
+            acc |= set(self._parse_part(part))
+        return acc
+
+    def _parse_part(self, part):
+        for regex, handler in self.pats:
+            m = regex.match(part)
+            if m:
+                return handler(m.groups())
+        return self._expand_range((part, ))
+
+    def _expand_range(self, toks):
+        fr = self._expand_number(toks[0])
        if len(toks) > 1:
-            return range(toks[0], int(toks[2]) + 1)
-        else:
-            return toks[0]
+            to = self._expand_number(toks[1])
+            return range(fr, min(to + 1, self.max_ + 1))
+        return [fr]

-    def _expand_star(self, toks):
-        return range(self.max_)
+    def _range_steps(self, toks):
+        if len(toks) != 3 or not toks[2]:
+            raise self.ParseException("empty filter")
+        return self._filter_steps(self._expand_range(toks[:2]), int(toks[2]))

-    @staticmethod
-    def _filter_steps(toks):
-        numbers = toks[0]
-        if len(toks) > 1:
-            steps = toks[2]
-            return [n for n in numbers if n % steps == 0]
-        else:
-            return numbers
+    def _star_steps(self, toks):
+        if not toks or not toks[0]:
+            raise self.ParseException("empty filter")
+        return self._filter_steps(self._expand_star(), int(toks[0]))

-    @staticmethod
-    def _ignore_comma(toks):
-        return [x for x in toks if x != ',']
+    def _filter_steps(self, numbers, steps):
+        return [n for n in numbers if n % steps == 0]

-    @staticmethod
-    def _join_to_set(toks):
-        return set(toks.asList())
+    def _expand_star(self, *args):
+        return range(self.max_)

-    def parse(self, cronspec):
-        return self.parser.parseString(cronspec).pop()
+    def _expand_number(self, s):
+        if isinstance(s, basestring) and s[0] == '-':
+            raise self.ParseException("negative numbers not supported")
+        try:
+            i = int(s)
+        except ValueError:
+            try:
+                i = weekday(s)
+            except KeyError:
+                raise ValueError("Invalid weekday literal '%s'." % s)
+        return i


class crontab(schedule):
@@ -236,7 +251,7 @@ class crontab(schedule):
        return result

    def __init__(self, minute='*', hour='*', day_of_week='*',
-            nowfun=datetime.now):
+            nowfun=datetime.utcnow):
        self._orig_minute = minute
        self._orig_hour = hour
        self._orig_day_of_week = day_of_week
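The regex-based parser accepts the same crontab expressions the old pyparsing grammar did; for example:

.. code-block:: python

    from celery.schedules import crontab_parser

    assert crontab_parser(60).parse("*/15") == set([0, 15, 30, 45])
    assert crontab_parser(24).parse("0-6/2") == set([0, 2, 4, 6])
    assert crontab_parser(7).parse("*") == set([0, 1, 2, 3, 4, 5, 6])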

+ 72 - 29
celery/security/__init__.py

@@ -1,39 +1,82 @@
-from kombu.serialization import unregister, SerializerNotInstalled
+from __future__ import absolute_import
+from __future__ import with_statement

-from celery.app import app_or_default
-from celery.security.serialization import register_auth
-from celery.exceptions import ImproperlyConfigured
+from kombu.serialization import registry, SerializerNotInstalled

-def _disable_insecure_serializers():
-    for name in ('pickle', 'json', 'yaml', 'msgpack'):
+from .. import current_app
+from ..exceptions import ImproperlyConfigured
+
+from .serialization import register_auth
+
+SSL_NOT_INSTALLED = """\
+You need to install the pyOpenSSL library to use the auth serializer.
+Please install by:
+
+    $ pip install pyOpenSSL
+"""
+
+SETTING_MISSING = """\
+Sorry, but you have to configure the
+    * CELERY_SECURITY_KEY
+    * CELERY_SECURITY_CERTIFICATE, and the
+    * CELERY_SECURITY_CERT_STORE
+configuration settings to use the auth serializer.
+
+Please see the configuration reference for more information.
+"""
+
+
+def disable_untrusted_serializers(whitelist=None):
+    for name in set(registry._decoders.keys()) - set(whitelist or []):
        try:
-            unregister(name)
+            registry.disable(name)
        except SerializerNotInstalled:
            pass

-def setup_security():
-    """setup secure serialization"""
-    conf = app_or_default().conf
-    if conf.CELERY_TASK_SERIALIZER != 'auth':
+
+def setup_security(allowed_serializers=None, key=None, cert=None, store=None,
+        digest="sha1", serializer="json"):
+    """Setup the message-signing serializer.
+
+    Disables untrusted serializers and if configured to use the ``auth``
+    serializer will register the auth serializer with the provided settings
+    into the Kombu serializer registry.
+
+    :keyword allowed_serializers:  List of serializer names, or content_types
+        that should be exempt from being disabled.
+    :keyword key: Name of private key file to use.
+        Defaults to the :setting:`CELERY_SECURITY_KEY` setting.
+    :keyword cert: Name of certificate file to use.
+        Defaults to the :setting:`CELERY_SECURITY_CERTIFICATE` setting.
+    :keyword store: Directory containing certificates.
+        Defaults to the :setting:`CELERY_SECURITY_CERT_STORE` setting.
+    :keyword digest: Digest algorithm used when signing messages.
+        Default is ``sha1``.
+    :keyword serializer: Serializer used to encode messages after
+        they have been signed.  See :setting:`CELERY_TASK_SERIALIZER` for
+        the serializers supported.
+        Default is ``json``.
+
+    """
+
+    disable_untrusted_serializers(allowed_serializers)
+
+    conf = current_app.conf
+    if conf.CELERY_TASK_SERIALIZER != "auth":
        return

    try:
-        from OpenSSL import crypto
+        from OpenSSL import crypto  # noqa
    except ImportError:
-        raise ImproperlyConfigured(
-            "You need to install pyOpenSSL library to use "
-            "the auth serializer.")
-
-    key = getattr(conf, 'CELERY_SECURITY_KEY', None)
-    cert = getattr(conf, 'CELERY_SECURITY_CERTIFICATE', None)
-    store = getattr(conf, 'CELERY_SECURITY_CERT_STORE', None)
-
-    if key is None or cert is None or store is None:
-        raise ImproperlyConfigured(
-            "CELERY_SECURITY_KEY, CELERY_SECURITY_CERTIFICATE and "
-            "CELERY_SECURITY_CERT_STORE options are required "
-            "for auth serializer")
-
-    with open(key) as kf, open(cert) as cf:
-        register_auth(kf.read(), cf.read(), store)
-    _disable_insecure_serializers()
+        raise ImproperlyConfigured(SSL_NOT_INSTALLED)
+
+    key = key or conf.CELERY_SECURITY_KEY
+    cert = cert or conf.CELERY_SECURITY_CERTIFICATE
+    store = store or conf.CELERY_SECURITY_CERT_STORE
+
+    if any(not v for v in (key, cert, store)):
+        raise ImproperlyConfigured(SETTING_MISSING)
+
+    with open(key) as kf:
+        with open(cert) as cf:
+            register_auth(kf.read(), cf.read(), store)
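A hedged configuration sketch of the new signature (the file paths are placeholders for your own deployment):

.. code-block:: python

    from celery import current_app
    from celery.security import setup_security

    current_app.conf.CELERY_TASK_SERIALIZER = "auth"
    setup_security(key="/etc/ssl/private/worker.key",
                   cert="/etc/ssl/certs/worker.pem",
                   store="/etc/ssl/trusted")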

+ 30 - 22
celery/security/certificate.py

@@ -1,48 +1,55 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
import os
import glob

try:
    from OpenSSL import crypto
except ImportError:
-    crypto = None
+    crypto = None  # noqa
+
+from ..exceptions import SecurityError

-from celery.security.exceptions import SecurityError


class Certificate(object):
-    """X.509 certificate"""
+    """X.509 certificate."""
+
    def __init__(self, cert):
        assert crypto is not None
        try:
            self._cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
-        except crypto.Error, e:
-            raise SecurityError("Invalid certificate", e)
+        except crypto.Error, exc:
+            raise SecurityError("Invalid certificate: %r" % (exc, ))

    def has_expired(self):
-        """check if the certificate has expired"""
+        """Check if the certificate has expired."""
        return self._cert.has_expired()

    def get_serial_number(self):
-        """return certificate serial number"""
+        """Returns the certificates serial number."""
        return self._cert.get_serial_number()

    def get_issuer(self):
-        """return issuer (CA) as a string"""
-        return ' '.join(map(lambda x: x[1],
-                            self._cert.get_issuer().get_components()))
+        """Returns issuer (CA) as a string"""
+        return ' '.join(x[1] for x in
+                        self._cert.get_issuer().get_components())

    def get_id(self):
-        """serial number/issuer pair uniquely identifies a certificate"""
+        """Serial number/issuer pair uniquely identifies a certificate"""
        return "%s %s" % (self.get_issuer(), self.get_serial_number())

-    def verify(self, data, signature):
-        """verify the signature for a data string"""
+    def verify(self, data, signature, digest):
+        """Verifies the signature for string containing data."""
        try:
-            crypto.verify(self._cert, signature, data, 'sha1')
-        except crypto.Error, e:
-            raise SecurityError("Bad signature", e)
+            crypto.verify(self._cert, signature, data, digest)
+        except crypto.Error, exc:
+            raise SecurityError("Bad signature: %r" % (exc, ))
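Loading and inspecting a certificate, assuming pyOpenSSL is installed (the path is hypothetical):

.. code-block:: python

    from celery.security.certificate import Certificate

    with open("/etc/ssl/certs/worker.pem") as f:
        cert = Certificate(f.read())
    if not cert.has_expired():
        print(cert.get_id())   # "<issuer components> <serial number>"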
+

class CertStore(object):
    """Base class for certificate stores"""
+
    def __init__(self):
        self._certs = {}


@@ -55,16 +62,18 @@ class CertStore(object):
        """get certificate by id"""
        try:
            return self._certs[id]
-        except KeyError, e:
-            raise SecurityError("Unknown certificate: %s" % id, e)
+        except KeyError:
+            raise SecurityError("Unknown certificate: %r" % (id, ))

    def add_cert(self, cert):
        if cert.get_id() in self._certs:
-            raise SecurityError("Duplicate certificate: %s" % id)
+            raise SecurityError("Duplicate certificate: %r" % (id, ))
        self._certs[cert.get_id()] = cert

+
class FSCertStore(CertStore):
    """File system certificate store"""
+
    def __init__(self, path):
        CertStore.__init__(self)
        if os.path.isdir(path):
@@ -73,7 +82,6 @@ class FSCertStore(CertStore):
            with open(p) as f:
                cert = Certificate(f.read())
                if cert.has_expired():
-                    raise SecurityError("Expired certificate: %s" %\
-                                        cert.get_id())
+                    raise SecurityError(
+                        "Expired certificate: %r" % (cert.get_id(), ))
                self.add_cert(cert)
-

+ 0 - 5
celery/security/exceptions.py

@@ -1,5 +0,0 @@
-class SecurityError(Exception):
-    """Security related exceptions"""
-    def __init__(self, msg, exc=None, *args, **kwargs):
-        Exception.__init__(self, msg, exc, *args, **kwargs)
-

+ 13 - 10
celery/security/key.py

@@ -1,22 +1,25 @@
+from __future__ import absolute_import
+
try:
    from OpenSSL import crypto
except ImportError:
-    crypto = None
+    crypto = None  # noqa
+
+from ..exceptions import SecurityError

-from celery.security.exceptions import SecurityError


class PrivateKey(object):
+
    def __init__(self, key):
        assert crypto is not None
        try:
            self._key = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
-        except crypto.Error, e:
-            raise SecurityError("Invalid private key", e)
+        except crypto.Error, exc:
+            raise SecurityError("Invalid private key: %r" % (exc, ))

-    def sign(self, data):
-        """sign a data string"""
+    def sign(self, data, digest):
+        """sign string containing data."""
        try:
-            return crypto.sign(self._key, data, 'sha1')
-        except crypto.Error, e:
-            raise SecurityError("Unable to sign a data string", e)
-
+            return crypto.sign(self._key, data, digest)
+        except crypto.Error, exc:
+            raise SecurityError("Unable to sign data: %r" % (exc, ))

+ 60 - 32
celery/security/serialization.py

@@ -1,57 +1,85 @@
-import anyjson
+from __future__ import absolute_import

-from kombu.serialization import registry
+import base64
+
+from kombu.serialization import registry, encode, decode
+
+from ..exceptions import SecurityError
+from ..utils.encoding import bytes_to_str, str_to_bytes
+
+from .certificate import Certificate, FSCertStore
+from .key import PrivateKey
+
+
+def b64encode(s):
+    return bytes_to_str(base64.b64encode(str_to_bytes(s)))
+
+
+def b64decode(s):
+    return base64.b64decode(str_to_bytes(s))

-from celery.security.certificate import Certificate, FSCertStore
-from celery.security.key import PrivateKey
-from celery.security.exceptions import SecurityError

class SecureSerializer(object):

    def __init__(self, key=None, cert=None, cert_store=None,
-                       serialize=anyjson.serialize,
-                       deserialize=anyjson.deserialize):
+            digest="sha1", serializer="json"):
        self._key = key
        self._cert = cert
        self._cert_store = cert_store
-        self._serialize = serialize
-        self._deserialize = deserialize
+        self._digest = digest
+        self._serializer = serializer

    def serialize(self, data):
        """serialize data structure into string"""
        assert self._key is not None
        assert self._cert is not None
        try:
-            data = self._serialize(data)
-            signature = self._key.sign(data).encode("base64")
-            signer = self._cert.get_id()
-            return self._serialize(dict(data=data,
-                                        signer=signer,
-                                        signature=signature))
-        except Exception, e:
-            raise SecurityError("Unable to serialize", e)
+            content_type, content_encoding, body = encode(
+                    data, serializer=self._serializer)
+            # What we sign is the serialized body, not the body itself.
+            # this way the receiver doesn't have to decode the contents
+            # to verify the signature (and thus avoiding potential flaws
+            # in the decoding step).
+            return self._pack(body, content_type, content_encoding,
+                              signature=self._key.sign(body, self._digest),
+                              signer=self._cert.get_id())
+        except Exception, exc:
+            raise SecurityError("Unable to serialize: %r" % (exc, ))

    def deserialize(self, data):
        """deserialize data structure from string"""
        assert self._cert_store is not None
        try:
-            data = self._deserialize(data)
-            signature = data['signature'].decode("base64")
-            signer = data['signer']
-            data = data['data']
-            self._cert_store[signer].verify(data, signature)
-            return self._deserialize(data)
-        except Exception, e:
-            raise SecurityError("Unable to deserialize", e)
-
-def register_auth(key=None, cert=None, store=None):
+            payload = self._unpack(data)
+            signature, signer, body = (payload["signature"],
+                                       payload["signer"],
+                                       payload["body"])
+            self._cert_store[signer].verify(body,
+                                            signature, self._digest)
+        except Exception, exc:
+            raise SecurityError("Unable to deserialize: %r" % (exc, ))
+
+        return decode(body, payload["content_type"],
+                            payload["content_encoding"], force=True)
+
+    def _pack(self, body, content_type, content_encoding, signer, signature,
+            sep='\x00\x01'):
+        return b64encode(sep.join([signer, signature,
+                                   content_type, content_encoding, body]))
+
+    def _unpack(self, payload, sep='\x00\x01',
+            fields=("signer", "signature", "content_type",
+                    "content_encoding", "body")):
+        return dict(zip(fields, b64decode(payload).split(sep)))
+
+
+def register_auth(key=None, cert=None, store=None, digest="sha1",
+        serializer="json"):
    """register security serializer"""
-    global s
    s = SecureSerializer(key and PrivateKey(key),
                         cert and Certificate(cert),
                         store and FSCertStore(store),
-                         anyjson.serialize, anyjson.deserialize)
+                         digest=digest, serializer=serializer)
    registry.register("auth", s.serialize, s.deserialize,
-                      content_type='application/data',
-                      content_encoding='utf-8')
-
+                      content_type="application/data",
+                      content_encoding="utf-8")
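The wire format is simply the five fields joined by a separator and base64-encoded; a self-contained sketch of just the framing (the signing step is omitted):

.. code-block:: python

    import base64

    SEP = '\x00\x01'
    FIELDS = ("signer", "signature", "content_type",
              "content_encoding", "body")

    def unpack(payload):
        # mirrors SecureSerializer._unpack above
        return dict(zip(FIELDS, base64.b64decode(payload).split(SEP)))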

+ 14 - 268
celery/signals.py

@@ -1,284 +1,25 @@
+# -*- coding: utf-8 -*-
"""
-==============
-celery.signals
-==============
+    celery.signals
+    ~~~~~~~~~~~~~~

-Signals allows decoupled applications to receive notifications when
-certain actions occur elsewhere in the application.
-
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
-
-.. contents::
-    :local:
-
-.. _signal-basics:
-
-Basics
-======
-
-Several kinds of events trigger signals, you can connect to these signals
-to perform actions as they trigger.
-
-Example connecting to the :signal:`task_sent` signal:
-
-.. code-block:: python
-
-    from celery.signals import task_sent
-
-    def task_sent_handler(sender=None, task_id=None, task=None, args=None,
-                          kwargs=None, **kwds):
-        print("Got signal task_sent for task id %s" % (task_id, ))
-
-    task_sent.connect(task_sent_handler)
-
-
-Some signals also have a sender which you can filter by. For example the
-:signal:`task_sent` signal uses the task name as a sender, so you can
-connect your handler to be called only when tasks with name `"tasks.add"`
-has been sent by providing the `sender` argument to
-:class:`~celery.utils.dispatch.signal.Signal.connect`:
-
-.. code-block:: python
-
-    task_sent.connect(task_sent_handler, sender="tasks.add")
-
-.. _signal-ref:
-
-Signals
-=======
-
-Task Signals
-------------
-
-.. signal:: task_sent
-
-task_sent
-~~~~~~~~~
-
-Dispatched when a task has been sent to the broker.
-Note that this is executed in the client process, the one sending
-the task, not in the worker.
-
-Sender is the name of the task being sent.
-
-Provides arguments:
-
-* task_id
-    Id of the task to be executed.
-
-* task
-    The task being executed.
-
-* args
-    the tasks positional arguments.
-
-* kwargs
-    The tasks keyword arguments.
-
-* eta
-    The time to execute the task.
-
-* taskset
-    Id of the taskset this task is part of (if any).
-
-.. signal:: task_prerun
-
-task_prerun
-~~~~~~~~~~~
-
-Dispatched before a task is executed.
-
-Sender is the task class being executed.
-
-Provides arguments:
-
-* task_id
-    Id of the task to be executed.
-
-* task
-    The task being executed.
-
-* args
-    the tasks positional arguments.
-
-* kwargs
-    The tasks keyword arguments.
-
-.. signal:: task_postrun
-
-task_postrun
-~~~~~~~~~~~~
-
-Dispatched after a task has been executed.
-
-Sender is the task class executed.
-
-Provides arguments:
-
-* task_id
-    Id of the task to be executed.
-
-* task
-    The task being executed.
-
-* args
-    The tasks positional arguments.
-
-* kwargs
-    The tasks keyword arguments.
-
-* retval
-    The return value of the task.
-
-.. signal:: task_failure
-
-task_failure
-~~~~~~~~~~~~
-
-Dispatched when a task fails.
-
-Sender is the task class executed.
-
-Provides arguments:
-
-* task_id
-    Id of the task.
-
-* exception
-    Exception instance raised.
-
-* args
-    Positional arguments the task was called with.
-
-* kwargs
-    Keyword arguments the task was called with.
-
-* traceback
-    Stack trace object.
-
-* einfo
-    The :class:`celery.datastructures.ExceptionInfo` instance.
-
-Worker Signals
---------------
-
-.. signal:: worker_init
-
-worker_init
-~~~~~~~~~~~
-
-Dispatched before the worker is started.
-
-.. signal:: worker_ready
-
-worker_ready
-~~~~~~~~~~~~
-
-Dispatched when the worker is ready to accept work.
-
-.. signal:: worker_process_init
-
-worker_process_init
-~~~~~~~~~~~~~~~~~~~
-
-Dispatched by each new pool worker process when it starts.
-
-.. signal:: worker_shutdown
-
-worker_shutdown
-~~~~~~~~~~~~~~~
-
-Dispatched when the worker is about to shut down.
-
-Celerybeat Signals
-------------------
-
-.. signal:: beat_init
-
-beat_init
-~~~~~~~~~
-
-Dispatched when celerybeat starts (either standalone or embedded).
-Sender is the :class:`celery.beat.Service` instance.
-
-.. signal:: beat_embedded_init
-
-beat_embedded_init
-~~~~~~~~~~~~~~~~~~
-
-Dispatched in addition to the :signal:`beat_init` signal when celerybeat is
-started as an embedded process.  Sender is the
-:class:`celery.beat.Service` instance.
-
-Eventlet Signals
-----------------
-
-.. signal:: eventlet_pool_started
-
-eventlet_pool_started
-~~~~~~~~~~~~~~~~~~~~~
-
-Sent when the eventlet pool has been started.
-
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
-
-.. signal:: eventlet_pool_preshutdown
-
-eventlet_pool_preshutdown
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Sent when the worker shutdown, just before the eventlet pool
-is requested to wait for remaining workers.
-
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
-
-.. signal:: eventlet_pool_postshutdown
-
-eventlet_pool_postshutdown
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Sent when the pool has been joined and the worker is ready to shutdown.
-
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
-
-.. signal:: eventlet_pool_apply
-
-eventlet_pool_apply
-~~~~~~~~~~~~~~~~~~~
-
-Sent whenever a task is applied to the pool.
-
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
-
-Provides arguments:
-
-* target
-
-    The target function.
-
-* args
-
-    Positional arguments.
-
-* kwargs
-
-    Keyword arguments.
+    See :ref:`signals`.

+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.

"""
-from celery.utils.dispatch import Signal
+from __future__ import absolute_import
+
+from .utils.dispatch import Signal

task_sent = Signal(providing_args=["task_id", "task",
                                   "args", "kwargs",
                                   "eta", "taskset"])
-
task_prerun = Signal(providing_args=["task_id", "task",
                                     "args", "kwargs"])
-
task_postrun = Signal(providing_args=["task_id", "task",
                                      "args", "kwargs", "retval"])
-
task_failure = Signal(providing_args=["task_id", "exception",
                                      "args", "kwargs", "traceback",
                                      "einfo"])
@@ -290,6 +31,11 @@ worker_shutdown = Signal(providing_args=[])

setup_logging = Signal(providing_args=["loglevel", "logfile",
                                       "format", "colorize"])
+after_setup_logger = Signal(providing_args=["logger", "loglevel", "logfile",
+                                            "format", "colorize"])
+after_setup_task_logger = Signal(providing_args=["logger", "loglevel",
+                                                 "logfile", "format",
+                                                 "colorize"])

beat_init = Signal(providing_args=[])
beat_embedded_init = Signal(providing_args=[])
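Handlers connect to the new logging signals like any other signal; the keyword arguments match `providing_args`. A sketch:

.. code-block:: python

    from celery.signals import after_setup_logger

    def augment_logging(logger=None, loglevel=None, logfile=None,
                        format=None, colorize=None, **kwargs):
        # e.g. attach an extra handler after Celery configures logging
        pass

    after_setup_logger.connect(augment_logging)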

+ 2 - 5
celery/states.py

@@ -1,13 +1,10 @@
+# -*- coding: utf-8 -*-
"""
celery.states
=============

Built-in Task States.

-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
-
-
.. _states:

States
@@ -15,7 +12,6 @@ States

See :ref:`task-states`.

-
Sets
----

@@ -59,6 +55,7 @@ Misc.
-----

"""
+from __future__ import absolute_import

#: State precedence.
#: None represents the precedence of an unknown state.

+ 23 - 12
celery/task/__init__.py

@@ -1,14 +1,25 @@
# -*- coding: utf-8 -*-
+"""
+    celery.task
+    ~~~~~~~~~~~
+
+    Creating tasks, subtasks, sets and chords.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
import warnings

-from celery.app import app_or_default
-from celery.task.base import Task, PeriodicTask
-from celery.task.sets import TaskSet, subtask
-from celery.task.chords import chord
-from celery.task.control import discard_all
+from ..app import app_or_default
+from ..exceptions import CDeprecationWarning

-__all__ = ["Task", "TaskSet", "PeriodicTask", "subtask",
-           "discard_all", "chord"]
+from .base import Task, PeriodicTask  # noqa
+from .sets import TaskSet, subtask    # noqa
+from .chords import chord             # noqa
+from .control import discard_all      # noqa


def task(*args, **kwargs):
@@ -18,7 +29,7 @@ def task(*args, **kwargs):

    .. code-block:: python

-        @task()
+        @task
        def refresh_feed(url):
            return Feed.objects.get(url=url).refresh()

@@ -51,7 +62,7 @@ def periodic_task(*args, **options):

            .. code-block:: python

-                @task()
+                @task
                def refresh_feed(url):
                    return Feed.objects.get(url=url).refresh()

@@ -60,11 +71,11 @@ def periodic_task(*args, **options):
            .. code-block:: python

                @task(exchange="feeds")
-                def refresh_feed(url, **kwargs):
+                def refresh_feed(url):
                    try:
                        return Feed.objects.get(url=url).refresh()
                    except socket.error, exc:
-                        refresh_feed.retry(args=[url], kwargs=kwargs, exc=exc)
+                        refresh_feed.retry(exc=exc)

            Calling the resulting task:

@@ -95,7 +106,7 @@ def ping():  # ✞
    Please use :meth:`celery.task.control.ping` instead.

    """
-    warnings.warn(DeprecationWarning(
+    warnings.warn(CDeprecationWarning(
        "The ping task has been deprecated and will be removed in Celery "
        "v2.3.  Please use inspect.ping instead."))
    return PingTask.apply_async().get()

+ 17 - 5
celery/task/base.py

@@ -1,8 +1,20 @@
-from celery import current_app
-from celery.app.task import Context, TaskType, BaseTask  # noqa
-from celery.schedules import maybe_schedule
-from celery.utils import deprecated
-from celery.utils import timeutils
+# -*- coding: utf-8 -*-
+"""
+    celery.task.base
+    ~~~~~~~~~~~~~~~~
+
+    The task implementation has been moved to :mod:`celery.app.task`.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
+from .. import current_app
+from ..app.task import Context, TaskType, BaseTask  # noqa
+from ..schedules import maybe_schedule
+from ..utils import deprecated, timeutils

 Task = current_app.Task


+ 35 - 17
celery/task/chords.py

@@ -1,16 +1,30 @@
-from kombu.utils import gen_unique_id
+# -*- coding: utf-8 -*-
+"""
+    celery.task.chords
+    ~~~~~~~~~~~~~~~~~~

-from celery import current_app
-from celery.result import TaskSetResult
-from celery.task.sets import TaskSet, subtask
+    Chords (task set callbacks).
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
+from .. import current_app
+from ..result import AsyncResult, TaskSetResult
+from ..utils import uuid
+
+from .sets import TaskSet, subtask


 @current_app.task(name="celery.chord_unlock", max_retries=None)
-def _unlock_chord(setid, callback, interval=1, max_retries=None):
-    result = TaskSetResult.restore(setid)
+def _unlock_chord(setid, callback, interval=1, propagate=False,
+        max_retries=None, result=None):
+    result = TaskSetResult(setid, map(AsyncResult, result))
     if result.ready():
-        subtask(callback).delay(result.join())
-        result.delete()
+        j = result.join_native if result.supports_native_join else result.join
+        subtask(callback).delay(j(propagate=propagate))
     else:
         _unlock_chord.retry(countdown=interval, max_retries=max_retries)

@@ -19,17 +33,21 @@ class Chord(current_app.Task):
     accept_magic_kwargs = False
     name = "celery.chord"

-    def run(self, set, body, interval=1, max_retries=None, **kwargs):
+    def run(self, set, body, interval=1, max_retries=None,
+            propagate=False, **kwargs):
         if not isinstance(set, TaskSet):
             set = TaskSet(set)
         r = []
-        setid = gen_unique_id()
+        setid = uuid()
         for task in set.tasks:
-            uuid = gen_unique_id()
-            task.options.update(task_id=uuid, chord=body)
-            r.append(current_app.AsyncResult(uuid))
-        current_app.TaskSetResult(setid, r).save()
-        self.backend.on_chord_apply(setid, body, interval, max_retries)
+            tid = uuid()
+            task.options.update(task_id=tid, chord=body)
+            r.append(current_app.AsyncResult(tid))
+        self.backend.on_chord_apply(setid, body,
+                                    interval=interval,
+                                    max_retries=max_retries,
+                                    propagate=propagate,
+                                    result=r)
         return set.apply_async(taskset_id=setid)


@@ -41,7 +59,7 @@ class chord(object):
         self.options = options

     def __call__(self, body, **options):
-        uuid = body.options.setdefault("task_id", gen_unique_id())
+        tid = body.options.setdefault("task_id", uuid())
         self.Chord.apply_async((list(self.tasks), body), self.options,
                                 **options)
-        return body.type.app.AsyncResult(uuid)
+        return body.type.app.AsyncResult(tid)

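The chord rewrite above moves state off the backend's save/restore path: _unlock_chord now receives the participating result ids directly (rebuilt with map(AsyncResult, result)), prefers join_native() when the backend supports it, and gains a propagate flag threaded from Chord.run() through on_chord_apply(). The user-facing primitive is unchanged; a hedged sketch, assuming illustrative add/tsum tasks, a running worker, and a backend with chord support:

.. code-block:: python

    from celery.task import chord, task

    @task
    def add(x, y):
        return x + y

    @task
    def tsum(numbers):
        return sum(numbers)

    # header of parallel tasks; tsum is applied to the joined results
    result = chord(add.subtask((i, i)) for i in xrange(10))(tsum.subtask())
    result.get()   # -> 90, once every header task has completed
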
+ 22 - 5
celery/task/control.py

@@ -1,8 +1,21 @@
-from __future__ import absolute_import, with_statement
+# -*- coding: utf-8 -*-
+"""
+    celery.task.control
+    ~~~~~~~~~~~~~~~~~~~
+
+    Client for worker remote control commands.
+    Server implementation is in :mod:`celery.worker.control`.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+from __future__ import with_statement

 from kombu.pidbox import Mailbox

-from celery.app import app_or_default
+from ..app import app_or_default


 def flatten_reply(reply):
@@ -51,7 +64,7 @@ class Inspect(object):
     def revoked(self):
         return self._request("dump_revoked")

-    def registered_tasks(self):
+    def registered(self):
         return self._request("dump_tasks")

     def enable_events(self):
@@ -75,6 +88,8 @@ class Inspect(object):
     def active_queues(self):
         return self._request("active_queues")

+    registered_tasks = registered
+

 class Control(object):
     Mailbox = Mailbox
@@ -97,8 +112,8 @@ class Control(object):

         """
         with self.app.default_connection(connection, connect_timeout) as conn:
-            with self.app.amqp.get_task_consumer(connection=conn) as consumer:
-                return consumer.discard_all()
+            return self.app.amqp.get_task_consumer(connection=conn)\
+                                .discard_all()

     def revoke(self, task_id, destination=None, terminate=False,
             signal="SIGTERM", **kwargs):
@@ -207,6 +222,8 @@ class Control(object):

         """
         with self.app.default_connection(connection, connect_timeout) as conn:
+            if channel is None:
+                channel = conn.default_channel
             return self.mailbox(conn)._broadcast(command, arguments,
                                                  destination, reply, timeout,
                                                  limit, callback,

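Two behavioural notes on the hunks above: Inspect.registered_tasks is renamed to registered, with the old name kept as an alias so existing callers keep working, and broadcast now falls back to the connection's default channel when none is given. A sketch of the rename, assuming the inspect() shortcut this module exposes, a reachable broker and at least one running worker:

.. code-block:: python

    from celery.task.control import inspect

    i = inspect()              # optionally restrict to specific worker hostnames
    i.registered()             # new spelling
    i.registered_tasks()       # old spelling, kept as an alias
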
+ 34 - 12
celery/task/http.py

@@ -1,16 +1,30 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.task.http
+    ~~~~~~~~~~~~~~~~
+
+    Task webhooks implementation.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
+import sys
 import urllib2

 from urllib import urlencode
 from urlparse import urlparse
 try:
     from urlparse import parse_qsl
-except ImportError:
+except ImportError:  # pragma: no cover
     from cgi import parse_qsl  # noqa

 from anyjson import deserialize

-from celery import __version__ as celery_version
-from celery.task.base import Task as BaseTask
+from .. import __version__ as celery_version
+from .base import Task as BaseTask

 GET_METHODS = frozenset(["GET", "HEAD"])

@@ -34,11 +48,19 @@ def maybe_utf8(value):
     return value


-def utf8dict(tup):
-    """With a dict's items() tuple return a new dict with any utf-8
-    keys/values encoded."""
-    return dict((key.encode("utf-8"), maybe_utf8(value))
-                    for key, value in tup)
+if sys.version_info >= (3, 0):
+
+    def utf8dict(tup):
+        if not isinstance(tup, dict):
+            return dict(tup)
+        return tup
+else:
+
+    def utf8dict(tup):  # noqa
+        """With a dict's items() tuple return a new dict with any utf-8
+        keys/values encoded."""
+        return dict((key.encode("utf-8"), maybe_utf8(value))
+                        for key, value in tup)


 def extract_response(raw_response):
@@ -119,8 +141,9 @@ class HttpDispatch(object):

     def make_request(self, url, method, params):
         """Makes an HTTP request and returns the response."""
-        request = urllib2.Request(url, params, headers=self.http_headers)
-        request.headers.update(self.http_headers)
+        request = urllib2.Request(url, params)
+        for key, val in self.http_headers.items():
+            request.add_header(key, val)
         response = urllib2.urlopen(request)         # user catches errors.
         return response.read()

@@ -137,8 +160,7 @@ class HttpDispatch(object):

     @property
     def http_headers(self):
-        headers = {"Content-Type": "application/json",
-                   "User-Agent": self.user_agent}
+        headers = {"User-Agent": self.user_agent}
         return headers


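Besides the Python 3 split of utf8dict, make_request above stops passing the header dict to the urllib2.Request constructor and then updating request.headers again; each header now goes through add_header(), which applies urllib2's usual key normalisation. The default Content-Type header is also dropped from http_headers. The same pattern in isolation, with hypothetical values:

.. code-block:: python

    import urllib2

    request = urllib2.Request("http://example.com/ping", "x=1")
    for key, val in {"User-Agent": "celery/2.3"}.items():
        request.add_header(key, val)        # normalises the header name
    body = urllib2.urlopen(request).read()  # caller handles HTTP errors
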
+ 6 - 4
celery/task/schedules.py

@@ -1,7 +1,9 @@
-import warnings
-from celery.schedules import schedule, crontab_parser, crontab
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import

-__all__ = ["schedule", "crontab_parser", "crontab"]
+import warnings
+from ..schedules import schedule, crontab_parser, crontab  # noqa
+from ..exceptions import CDeprecationWarning

-warnings.warn(DeprecationWarning(
+warnings.warn(CDeprecationWarning(
     "celery.task.schedules is deprecated and renamed to celery.schedules"))

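The switch from DeprecationWarning to CDeprecationWarning (imported from celery.exceptions in the hunks above) matters on Python 2.7, where plain DeprecationWarning is ignored by default; a Celery-specific class also lets users target these warnings precisely. For example, to silence only Celery's deprecation notices:

.. code-block:: python

    import warnings
    from celery.exceptions import CDeprecationWarning

    # mute Celery's deprecation chatter without touching other warnings
    warnings.filterwarnings("ignore", category=CDeprecationWarning)
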
+ 34 - 22
celery/task/sets.py

@@ -1,14 +1,25 @@
-from __future__ import absolute_import, with_statement
+# -*- coding: utf-8 -*-
+"""
+    celery.task.sets
+    ~~~~~~~~~~~~~~~~

-import warnings
+    Creating and applying groups of tasks.

-from kombu.utils import cached_property
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.

-from celery import registry
-from celery.app import app_or_default
-from celery.datastructures import AttributeDict
-from celery.utils import gen_unique_id
-from celery.utils.compat import UserList
+"""
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import warnings
+
+from .. import registry
+from ..app import app_or_default
+from ..datastructures import AttributeDict
+from ..exceptions import CDeprecationWarning
+from ..utils import cached_property, reprcall, uuid
+from ..utils.compat import UserList

 TASKSET_DEPRECATION_TEXT = """\
 Using this invocation of TaskSet is deprecated and will be removed
@@ -17,7 +28,7 @@ in Celery v2.4!
 TaskSets now supports multiple types of tasks, the API has to reflect
 this so the syntax has been changed to:

-    from celery.task.sets import TaskSet
+    from celery.task import TaskSet

     ts = TaskSet(tasks=[
             %(cls)s.subtask(args1, kwargs1, options1),
@@ -92,24 +103,25 @@ class subtask(AttributeDict):
         options = dict(self.options, **options)
         return self.type.apply_async(args, kwargs, **options)

-    def get_type(self):
-        return self.type
-
     def __reduce__(self):
         # for serialization, the task type is lazily loaded,
         # and not stored in the dict itself.
         return (self.__class__, (dict(self), ), None)

-    def __repr__(self, kwformat=lambda i: "%s=%r" % i, sep=', '):
-        kw = self["kwargs"]
-        return "%s(%s%s%s)" % (self["task"], sep.join(map(repr, self["args"])),
-                kw and sep or "", sep.join(map(kwformat, kw.iteritems())))
+    def __repr__(self):
+        return reprcall(self["task"], self["args"], self["kwargs"])

     @cached_property
     def type(self):
         return registry.tasks[self.task]


+def maybe_subtask(t):
+    if not isinstance(t, subtask):
+        return subtask(t)
+    return t
+
+
 class TaskSet(UserList):
     """A task containing several subtasks, making it possible
     to track how many, or when all of the tasks have been completed.
@@ -134,7 +146,7 @@ class TaskSet(UserList):
         self.app = app_or_default(app)
         if task is not None:
             if hasattr(task, "__iter__"):
-                tasks = task
+                tasks = [maybe_subtask(t) for t in task]
             else:
                 # Previously TaskSet only supported applying one kind of task.
                 # the signature then was TaskSet(task, arglist),
@@ -144,7 +156,7 @@ class TaskSet(UserList):
                 self._task_name = task.name
                 warnings.warn(TASKSET_DEPRECATION_TEXT % {
                                 "cls": task.__class__.__name__},
-                              DeprecationWarning)
+                              CDeprecationWarning)
         self.data = list(tasks or [])
         self.total = len(self.tasks)
         self.Publisher = Publisher or self.app.amqp.TaskPublisher
@@ -158,7 +170,7 @@ class TaskSet(UserList):
             return self.apply(taskset_id=taskset_id)

         with app.default_connection(connection, connect_timeout) as conn:
-            setid = taskset_id or gen_unique_id()
+            setid = taskset_id or uuid()
             pub = publisher or self.Publisher(connection=conn)
             try:
                 results = self._async_results(setid, pub)
@@ -174,7 +186,7 @@ class TaskSet(UserList):

     def apply(self, taskset_id=None):
         """Applies the taskset locally by blocking until all tasks return."""
-        setid = taskset_id or gen_unique_id()
+        setid = taskset_id or uuid()
         return self.app.TaskSetResult(setid, self._sync_results(setid))

     def _sync_results(self, taskset_id):
@@ -188,12 +200,12 @@ class TaskSet(UserList):
     def task(self):
         warnings.warn(
             "TaskSet.task is deprecated and will be removed in 1.4",
-            DeprecationWarning)
+            CDeprecationWarning)
         return self._task

     @property
     def task_name(self):
         warnings.warn(
             "TaskSet.task_name is deprecated and will be removed in 1.4",
-            DeprecationWarning)
+            CDeprecationWarning)
         return self._task_name

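The new maybe_subtask() helper above means TaskSet coerces every element of an iterable task argument, so plain dicts (for example, subtasks that arrived through JSON) are promoted to subtask instances; the deprecated TaskSet.task/task_name properties now warn via CDeprecationWarning. A sketch, reusing a hypothetical tasks.add:

.. code-block:: python

    from celery.task.sets import TaskSet, maybe_subtask, subtask

    sigs = [subtask("tasks.add", (2, 2)),
            {"task": "tasks.add", "args": (4, 4),
             "kwargs": {}, "options": {}}]       # e.g. deserialized input
    ts = TaskSet(tasks=[maybe_subtask(s) for s in sigs])
    result = ts.apply_async()    # needs a broker; returns a TaskSetResult
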
+ 3 - 0
celery/tests/__init__.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import logging
 import os
 import sys
@@ -11,6 +13,7 @@ os.environ.setdefault("CELERY_CONFIG_MODULE", config_module)
 os.environ["CELERY_LOADER"] = "default"
 os.environ["EVENTLET_NOPATCH"] = "yes"
 os.environ["GEVENT_NOPATCH"] = "yes"
+os.environ["KOMBU_DISABLE_LIMIT_PROTECTION"] = "yes"

 try:
     WindowsError = WindowsError  # noqa

+ 2 - 0
celery/tests/compat.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import sys


+ 6 - 4
celery/tests/config.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import os

 BROKER_TRANSPORT = "memory"
@@ -22,7 +24,7 @@ TT_HOST = os.environ.get("TT_HOST") or "localhost"
 TT_PORT = int(os.environ.get("TT_PORT") or 1978)

 # Redis results tests (only executed if installed and running)
-REDIS_HOST = os.environ.get("REDIS_HOST") or "localhost"
-REDIS_PORT = int(os.environ.get("REDIS_PORT") or 6379)
-REDIS_DB = os.environ.get("REDIS_DB") or 0
-REDIS_PASSWORD = os.environ.get("REDIS_PASSWORD")
+CELERY_REDIS_HOST = os.environ.get("REDIS_HOST") or "localhost"
+CELERY_REDIS_PORT = int(os.environ.get("REDIS_PORT") or 6379)
+CELERY_REDIS_DB = os.environ.get("REDIS_DB") or 0
+CELERY_REDIS_PASSWORD = os.environ.get("REDIS_PASSWORD")

+ 2 - 0
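The renamed settings above match the CELERY_REDIS_* configuration keys used by the Redis result backend, while the values are still seeded from the unprefixed environment variables, so a test run can still point at a non-default Redis server the same way as before:

.. code-block:: python

    import os
    # read by celery/tests/config.py and surfaced as CELERY_REDIS_* settings
    os.environ["REDIS_HOST"] = "redis.example.com"   # hypothetical host
    os.environ["REDIS_PORT"] = "6380"
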
celery/tests/functional/case.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import atexit
 import logging
 import os

+ 2 - 0
celery/tests/functional/tasks.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import time

 from celery.task import task

+ 324 - 0
celery/tests/test_app/__init__.py

@@ -0,0 +1,324 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import os
+import sys
+
+from mock import Mock
+
+from celery import Celery
+from celery import app as _app
+from celery.app import defaults
+from celery.app.base import BaseApp
+from celery.loaders.base import BaseLoader
+from celery.platforms import pyimplementation
+from celery.utils.serialization import pickle
+
+from celery.tests import config
+from celery.tests.utils import (unittest, mask_modules, platform_pyimp,
+                                sys_platform, pypy_version)
+from celery.utils.mail import ErrorMail
+from kombu.utils import gen_unique_id
+
+THIS_IS_A_KEY = "this is a value"
+
+
+class Object(object):
+
+    def __init__(self, **kwargs):
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+
+
+def _get_test_config():
+    return dict((key, getattr(config, key))
+                    for key in dir(config)
+                        if key.isupper() and not key.startswith("_"))
+
+test_config = _get_test_config()
+
+
+class test_App(unittest.TestCase):
+
+    def setUp(self):
+        self.app = Celery(set_as_current=False)
+        self.app.conf.update(test_config)
+
+    def test_task(self):
+        app = Celery("foozibari", set_as_current=False)
+
+        def fun():
+            pass
+
+        fun.__module__ = "__main__"
+        task = app.task(fun)
+        self.assertEqual(task.name, app.main + ".fun")
+
+    def test_repr(self):
+        self.assertTrue(repr(self.app))
+
+    def test_TaskSet(self):
+        ts = self.app.TaskSet()
+        self.assertListEqual(ts.tasks, [])
+        self.assertIs(ts.app, self.app)
+
+    def test_pickle_app(self):
+        changes = dict(THE_FOO_BAR="bars",
+                       THE_MII_MAR="jars")
+        self.app.conf.update(changes)
+        saved = pickle.dumps(self.app)
+        self.assertLess(len(saved), 2048)
+        restored = pickle.loads(saved)
+        self.assertDictContainsSubset(changes, restored.conf)
+
+    def test_worker_main(self):
+        from celery.bin import celeryd
+
+        class WorkerCommand(celeryd.WorkerCommand):
+
+            def execute_from_commandline(self, argv):
+                return argv
+
+        prev, celeryd.WorkerCommand = celeryd.WorkerCommand, WorkerCommand
+        try:
+            ret = self.app.worker_main(argv=["--version"])
+            self.assertListEqual(ret, ["--version"])
+        finally:
+            celeryd.WorkerCommand = prev
+
+    def test_config_from_envvar(self):
+        os.environ["CELERYTEST_CONFIG_OBJECT"] = "celery.tests.test_app"
+        self.app.config_from_envvar("CELERYTEST_CONFIG_OBJECT")
+        self.assertEqual(self.app.conf.THIS_IS_A_KEY, "this is a value")
+
+    def test_config_from_object(self):
+
+        class Object(object):
+            LEAVE_FOR_WORK = True
+            MOMENT_TO_STOP = True
+            CALL_ME_BACK = 123456789
+            WANT_ME_TO = False
+            UNDERSTAND_ME = True
+
+        self.app.config_from_object(Object())
+
+        self.assertTrue(self.app.conf.LEAVE_FOR_WORK)
+        self.assertTrue(self.app.conf.MOMENT_TO_STOP)
+        self.assertEqual(self.app.conf.CALL_ME_BACK, 123456789)
+        self.assertFalse(self.app.conf.WANT_ME_TO)
+        self.assertTrue(self.app.conf.UNDERSTAND_ME)
+
+    def test_config_from_cmdline(self):
+        cmdline = [".always_eager=no",
+                   ".result_backend=/dev/null",
+                   '.task_error_whitelist=(list)["a", "b", "c"]',
+                   "celeryd.prefetch_multiplier=368",
+                   ".foobarstring=(string)300",
+                   ".foobarint=(int)300",
+                   '.result_engine_options=(dict){"foo": "bar"}']
+        self.app.config_from_cmdline(cmdline, namespace="celery")
+        self.assertFalse(self.app.conf.CELERY_ALWAYS_EAGER)
+        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "/dev/null")
+        self.assertEqual(self.app.conf.CELERYD_PREFETCH_MULTIPLIER, 368)
+        self.assertListEqual(self.app.conf.CELERY_TASK_ERROR_WHITELIST,
+                             ["a", "b", "c"])
+        self.assertEqual(self.app.conf.CELERY_FOOBARSTRING, "300")
+        self.assertEqual(self.app.conf.CELERY_FOOBARINT, 300)
+        self.assertDictEqual(self.app.conf.CELERY_RESULT_ENGINE_OPTIONS,
+                             {"foo": "bar"})
+
+    def test_compat_setting_CELERY_BACKEND(self):
+
+        self.app.config_from_object(Object(CELERY_BACKEND="set_by_us"))
+        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "set_by_us")
+
+    def test_setting_BROKER_TRANSPORT_OPTIONS(self):
+
+        _args = {'foo': 'bar', 'spam': 'baz'}
+
+        self.app.config_from_object(Object())
+        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, {})
+
+        self.app.config_from_object(Object(BROKER_TRANSPORT_OPTIONS=_args))
+        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, _args)
+
+    def test_Windows_log_color_disabled(self):
+        self.app.IS_WINDOWS = True
+        self.assertFalse(self.app.log.supports_color())
+
+    def test_compat_setting_CARROT_BACKEND(self):
+        self.app.config_from_object(Object(CARROT_BACKEND="set_by_us"))
+        self.assertEqual(self.app.conf.BROKER_TRANSPORT, "set_by_us")
+
+    def test_mail_admins(self):
+
+        class Loader(BaseLoader):
+
+            def mail_admins(*args, **kwargs):
+                return args, kwargs
+
+        self.app.loader = Loader()
+        self.app.conf.ADMINS = None
+        self.assertFalse(self.app.mail_admins("Subject", "Body"))
+        self.app.conf.ADMINS = [("George Costanza", "george@vandelay.com")]
+        self.assertTrue(self.app.mail_admins("Subject", "Body"))
+
+    def test_amqp_get_broker_info(self):
+        self.assertDictContainsSubset({"hostname": "localhost",
+                                       "userid": "guest",
+                                       "password": "guest",
+                                       "virtual_host": "/"},
+                                      self.app.broker_connection(
+                                          transport="amqplib").info())
+        self.app.conf.BROKER_PORT = 1978
+        self.app.conf.BROKER_VHOST = "foo"
+        self.assertDictContainsSubset({"port": 1978,
+                                       "virtual_host": "foo"},
+                                      self.app.broker_connection(
+                                          transport="amqplib").info())
+        conn = self.app.broker_connection(virtual_host="/value")
+        self.assertDictContainsSubset({"virtual_host": "/value"},
+                                      conn.info())
+
+    def test_BROKER_BACKEND_alias(self):
+        self.assertEqual(self.app.conf.BROKER_BACKEND,
+                         self.app.conf.BROKER_TRANSPORT)
+
+    def test_with_default_connection(self):
+
+        @self.app.with_default_connection
+        def handler(connection=None, foo=None):
+            return connection, foo
+
+        connection, foo = handler(foo=42)
+        self.assertEqual(foo, 42)
+        self.assertTrue(connection)
+
+    def test_after_fork(self):
+        p = self.app._pool = Mock()
+        self.app._after_fork(self.app)
+        p.force_close_all.assert_called_with()
+        self.assertIsNone(self.app._pool)
+        self.app._after_fork(self.app)
+
+    def test_pool_no_multiprocessing(self):
+        with mask_modules("multiprocessing.util"):
+            pool = self.app.pool
+            self.assertIs(pool, self.app._pool)
+
+    def test_bugreport(self):
+        self.assertTrue(self.app.bugreport())
+
+    def test_send_task_sent_event(self):
+        from celery.app import amqp
+
+        class Dispatcher(object):
+            sent = []
+
+            def send(self, type, **fields):
+                self.sent.append((type, fields))
+
+        conn = self.app.broker_connection()
+        chan = conn.channel()
+        try:
+            for e in ("foo_exchange", "moo_exchange", "bar_exchange"):
+                chan.exchange_declare(e, "direct", durable=True)
+                chan.queue_declare(e, durable=True)
+                chan.queue_bind(e, e, e)
+        finally:
+            chan.close()
+        assert conn.transport_cls == "memory"
+
+        pub = self.app.amqp.TaskPublisher(conn, exchange="foo_exchange")
+        self.assertIn("foo_exchange", amqp._exchanges_declared)
+
+        dispatcher = Dispatcher()
+        self.assertTrue(pub.delay_task("footask", (), {},
+                                       exchange="moo_exchange",
+                                       routing_key="moo_exchange",
+                                       event_dispatcher=dispatcher))
+        self.assertTrue(dispatcher.sent)
+        self.assertEqual(dispatcher.sent[0][0], "task-sent")
+        self.assertTrue(pub.delay_task("footask", (), {},
+                                       event_dispatcher=dispatcher,
+                                       exchange="bar_exchange",
+                                       routing_key="bar_exchange"))
+        self.assertIn("bar_exchange", amqp._exchanges_declared)
+
+    def test_error_mail_sender(self):
+        x = ErrorMail.subject % {"name": "task_name",
+                                 "id": gen_unique_id(),
+                                 "exc": "FOOBARBAZ",
+                                 "hostname": "lana"}
+        self.assertTrue(x)
+
+
+class test_BaseApp(unittest.TestCase):
+
+    def test_on_init(self):
+        BaseApp()
+
+
+class test_defaults(unittest.TestCase):
+
+    def test_str_to_bool(self):
+        for s in ("false", "no", "0"):
+            self.assertFalse(defaults.str_to_bool(s))
+        for s in ("true", "yes", "1"):
+            self.assertTrue(defaults.str_to_bool(s))
+        with self.assertRaises(TypeError):
+            defaults.str_to_bool("unsure")
+
+
+class test_debugging_utils(unittest.TestCase):
+
+    def test_enable_disable_trace(self):
+        try:
+            _app.enable_trace()
+            self.assertEqual(_app.app_or_default, _app._app_or_default_trace)
+            _app.disable_trace()
+            self.assertEqual(_app.app_or_default, _app._app_or_default)
+        finally:
+            _app.disable_trace()
+
+
+class test_compilation(unittest.TestCase):
+    _clean = ("celery.app.base", )
+
+    def setUp(self):
+        self._prev = dict((k, sys.modules.pop(k, None)) for k in self._clean)
+
+    def tearDown(self):
+        sys.modules.update(self._prev)
+
+    def test_kombu_version_check(self):
+        import kombu
+        kombu.VERSION = (0, 9, 9)
+        with self.assertRaises(ImportError):
+            __import__("celery.app.base")
+
+
+class test_pyimplementation(unittest.TestCase):
+
+    def test_platform_python_implementation(self):
+        with platform_pyimp(lambda: "Xython"):
+            self.assertEqual(pyimplementation(), "Xython")
+
+    def test_platform_jython(self):
+        with platform_pyimp():
+            with sys_platform("java 1.6.51"):
+                self.assertIn("Jython", pyimplementation())
+
+    def test_platform_pypy(self):
+        with platform_pyimp():
+            with sys_platform("darwin"):
+                with pypy_version((1, 4, 3)):
+                    self.assertIn("PyPy", pyimplementation())
+                with pypy_version((1, 4, 3, "a4")):
+                    self.assertIn("PyPy", pyimplementation())
+
+    def test_platform_fallback(self):
+        with platform_pyimp():
+            with sys_platform("darwin"):
+                with pypy_version():
+                    self.assertEqual("CPython", pyimplementation())

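The consolidated test module above absorbs the old test_app.py (deleted below) and updates the config_from_envvar target accordingly, since THIS_IS_A_KEY now lives in celery.tests.test_app itself. The pattern under test, in isolation:

.. code-block:: python

    import os
    from celery import Celery

    os.environ["CELERYTEST_CONFIG_OBJECT"] = "celery.tests.test_app"
    app = Celery(set_as_current=False)
    app.config_from_envvar("CELERYTEST_CONFIG_OBJECT")
    assert app.conf.THIS_IS_A_KEY == "this is a value"
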
+ 0 - 218
celery/tests/test_app/test_app.py

@@ -1,218 +0,0 @@
-import os
-
-from celery import Celery
-from celery.app import defaults
-from celery.app.base import BaseApp
-from celery.loaders.base import BaseLoader
-from celery.utils.serialization import pickle
-
-from celery.tests import config
-from celery.tests.utils import unittest
-
-THIS_IS_A_KEY = "this is a value"
-
-
-class Object(object):
-
-    def __init__(self, **kwargs):
-        for key, value in kwargs.items():
-            setattr(self, key, value)
-
-
-def _get_test_config():
-    return dict((key, getattr(config, key))
-                    for key in dir(config)
-                        if key.isupper() and not key.startswith("_"))
-
-test_config = _get_test_config()
-
-
-class test_App(unittest.TestCase):
-
-    def setUp(self):
-        self.app = Celery(set_as_current=False)
-        self.app.conf.update(test_config)
-
-    def test_task(self):
-        app = Celery("foozibari", set_as_current=False)
-
-        def fun():
-            pass
-
-        fun.__module__ = "__main__"
-        task = app.task(fun)
-        self.assertEqual(task.name, app.main + ".fun")
-
-    def test_TaskSet(self):
-        ts = self.app.TaskSet()
-        self.assertListEqual(ts.tasks, [])
-        self.assertIs(ts.app, self.app)
-
-    def test_pickle_app(self):
-        changes = dict(THE_FOO_BAR="bars",
-                       THE_MII_MAR="jars")
-        self.app.conf.update(changes)
-        saved = pickle.dumps(self.app)
-        self.assertLess(len(saved), 2048)
-        restored = pickle.loads(saved)
-        self.assertDictContainsSubset(changes, restored.conf)
-
-    def test_worker_main(self):
-        from celery.bin import celeryd
-
-        class WorkerCommand(celeryd.WorkerCommand):
-
-            def execute_from_commandline(self, argv):
-                return argv
-
-        prev, celeryd.WorkerCommand = celeryd.WorkerCommand, WorkerCommand
-        try:
-            ret = self.app.worker_main(argv=["--version"])
-            self.assertListEqual(ret, ["--version"])
-        finally:
-            celeryd.WorkerCommand = prev
-
-    def test_config_from_envvar(self):
-        os.environ["CELERYTEST_CONFIG_OBJECT"] = \
-                "celery.tests.test_app.test_app"
-        self.app.config_from_envvar("CELERYTEST_CONFIG_OBJECT")
-        self.assertEqual(self.app.conf.THIS_IS_A_KEY, "this is a value")
-
-    def test_config_from_object(self):
-
-        class Object(object):
-            LEAVE_FOR_WORK = True
-            MOMENT_TO_STOP = True
-            CALL_ME_BACK = 123456789
-            WANT_ME_TO = False
-            UNDERSTAND_ME = True
-
-        self.app.config_from_object(Object())
-
-        self.assertTrue(self.app.conf.LEAVE_FOR_WORK)
-        self.assertTrue(self.app.conf.MOMENT_TO_STOP)
-        self.assertEqual(self.app.conf.CALL_ME_BACK, 123456789)
-        self.assertFalse(self.app.conf.WANT_ME_TO)
-        self.assertTrue(self.app.conf.UNDERSTAND_ME)
-
-    def test_config_from_cmdline(self):
-        cmdline = [".always_eager=no",
-                   ".result_backend=/dev/null",
-                   '.task_error_whitelist=(list)["a", "b", "c"]',
-                   "celeryd.prefetch_multiplier=368",
-                   ".foobarstring=(string)300",
-                   ".foobarint=(int)300",
-                   '.result_engine_options=(dict){"foo": "bar"}']
-        self.app.config_from_cmdline(cmdline, namespace="celery")
-        self.assertFalse(self.app.conf.CELERY_ALWAYS_EAGER)
-        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "/dev/null")
-        self.assertEqual(self.app.conf.CELERYD_PREFETCH_MULTIPLIER, 368)
-        self.assertListEqual(self.app.conf.CELERY_TASK_ERROR_WHITELIST,
-                             ["a", "b", "c"])
-        self.assertEqual(self.app.conf.CELERY_FOOBARSTRING, "300")
-        self.assertEqual(self.app.conf.CELERY_FOOBARINT, 300)
-        self.assertDictEqual(self.app.conf.CELERY_RESULT_ENGINE_OPTIONS,
-                             {"foo": "bar"})
-
-    def test_compat_setting_CELERY_BACKEND(self):
-
-        self.app.config_from_object(Object(CELERY_BACKEND="set_by_us"))
-        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "set_by_us")
-
-    def test_setting_BROKER_TRANSPORT_OPTIONS(self):
-
-        _args = {'foo': 'bar', 'spam': 'baz'}
-
-        self.app.config_from_object(Object())
-        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, {})
-
-        self.app.config_from_object(Object(BROKER_TRANSPORT_OPTIONS=_args))
-        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, _args)
-
-    def test_Windows_log_color_disabled(self):
-        self.app.IS_WINDOWS = True
-        self.assertFalse(self.app.log.supports_color())
-
-    def test_compat_setting_CARROT_BACKEND(self):
-        self.app.config_from_object(Object(CARROT_BACKEND="set_by_us"))
-        self.assertEqual(self.app.conf.BROKER_TRANSPORT, "set_by_us")
-
-    def test_mail_admins(self):
-
-        class Loader(BaseLoader):
-
-            def mail_admins(*args, **kwargs):
-                return args, kwargs
-
-        self.app.loader = Loader()
-        self.app.conf.ADMINS = None
-        self.assertFalse(self.app.mail_admins("Subject", "Body"))
-        self.app.conf.ADMINS = [("George Costanza", "george@vandelay.com")]
-        self.assertTrue(self.app.mail_admins("Subject", "Body"))
-
-    def test_amqp_get_broker_info(self):
-        self.assertDictContainsSubset({"hostname": "localhost",
-                                       "userid": "guest",
-                                       "password": "guest",
-                                       "virtual_host": "/"},
-                                      self.app.broker_connection().info())
-        self.app.conf.BROKER_PORT = 1978
-        self.app.conf.BROKER_VHOST = "foo"
-        self.assertDictContainsSubset({"port": 1978,
-                                       "virtual_host": "foo"},
-                                      self.app.broker_connection().info())
-        conn = self.app.broker_connection(virtual_host="/value")
-        self.assertDictContainsSubset({"virtual_host": "/value"},
-                                      conn.info())
-
-    def test_send_task_sent_event(self):
-        from celery.app import amqp
-
-        class Dispatcher(object):
-            sent = []
-
-            def send(self, type, **fields):
-                self.sent.append((type, fields))
-
-        conn = self.app.broker_connection()
-        chan = conn.channel()
-        try:
-            for e in ("foo_exchange", "moo_exchange", "bar_exchange"):
-                chan.exchange_declare(e, "direct", durable=True)
-                chan.queue_declare(e, durable=True)
-                chan.queue_bind(e, e, e)
-        finally:
-            chan.close()
-        assert conn.transport_cls == "memory"
-
-        pub = self.app.amqp.TaskPublisher(conn, exchange="foo_exchange")
-        self.assertIn("foo_exchange", amqp._exchanges_declared)
-
-        dispatcher = Dispatcher()
-        self.assertTrue(pub.delay_task("footask", (), {},
-                                       exchange="moo_exchange",
-                                       routing_key="moo_exchange",
-                                       event_dispatcher=dispatcher))
-        self.assertTrue(dispatcher.sent)
-        self.assertEqual(dispatcher.sent[0][0], "task-sent")
-        self.assertTrue(pub.delay_task("footask", (), {},
-                                       event_dispatcher=dispatcher,
-                                       exchange="bar_exchange",
-                                       routing_key="bar_exchange"))
-        self.assertIn("bar_exchange", amqp._exchanges_declared)
-
-
-class test_BaseApp(unittest.TestCase):
-
-    def test_on_init(self):
-        BaseApp()
-
-
-class test_defaults(unittest.TestCase):
-
-    def test_str_to_bool(self):
-        for s in ("false", "no", "0"):
-            self.assertFalse(defaults.str_to_bool(s))
-        for s in ("true", "yes", "1"):
-            self.assertTrue(defaults.str_to_bool(s))
-        self.assertRaises(TypeError, defaults.str_to_bool, "unsure")

+ 90 - 2
celery/tests/test_app/test_app_amqp.py

@@ -1,9 +1,13 @@
-from celery.tests.utils import unittest
+from __future__ import absolute_import
+from __future__ import with_statement
+
+from mock import Mock

 from celery.app.amqp import MSG_OPTIONS, extract_msg_options
+from celery.tests.utils import AppCase


-class TestMsgOptions(unittest.TestCase):
+class TestMsgOptions(AppCase):

     def test_MSG_OPTIONS(self):
         self.assertTrue(MSG_OPTIONS)
@@ -13,3 +17,87 @@ class TestMsgOptions(unittest.TestCase):
         result = extract_msg_options(testing)
         self.assertEqual(result["mandatory"], True)
         self.assertEqual(result["routing_key"], "foo.xuzzy")
+
+
+class test_TaskPublisher(AppCase):
+
+    def test__exit__(self):
+
+        publisher = self.app.amqp.TaskPublisher(self.app.broker_connection())
+        publisher.release = Mock()
+        with publisher:
+            pass
+        publisher.release.assert_called_with()
+
+    def test_ensure_declare_queue(self, q="x1242112"):
+        publisher = self.app.amqp.TaskPublisher(Mock())
+        self.app.amqp.queues.add(q, q, q)
+        publisher._declare_queue(q, retry=True)
+        self.assertTrue(publisher.connection.ensure.call_count)
+
+    def test_ensure_declare_exchange(self, e="x9248311"):
+        publisher = self.app.amqp.TaskPublisher(Mock())
+        publisher._declare_exchange(e, "direct", retry=True)
+        self.assertTrue(publisher.connection.ensure.call_count)
+
+    def test_retry_policy(self):
+        pub = self.app.amqp.TaskPublisher(Mock())
+        pub.delay_task("tasks.add", (2, 2), {},
+                       retry_policy={"frobulate": 32.4})
+
+    def test_publish_no_retry(self):
+        pub = self.app.amqp.TaskPublisher(Mock())
+        pub.delay_task("tasks.add", (2, 2), {}, retry=False, chord=123)
+        self.assertFalse(pub.connection.ensure.call_count)
+
+
+class test_PublisherPool(AppCase):
+
+    def test_setup_nolimit(self):
+        L = self.app.conf.BROKER_POOL_LIMIT
+        self.app.conf.BROKER_POOL_LIMIT = None
+        try:
+            delattr(self.app, "_pool")
+        except AttributeError:
+            pass
+        self.app.amqp.__dict__.pop("publisher_pool", None)
+        try:
+            pool = self.app.amqp.publisher_pool
+            self.assertEqual(pool.limit, self.app.pool.limit)
+            self.assertFalse(pool._resource.queue)
+
+            r1 = pool.acquire()
+            r2 = pool.acquire()
+            r1.release()
+            r2.release()
+            r1 = pool.acquire()
+            r2 = pool.acquire()
+        finally:
+            self.app.conf.BROKER_POOL_LIMIT = L
+
+    def test_setup(self):
+        L = self.app.conf.BROKER_POOL_LIMIT
+        self.app.conf.BROKER_POOL_LIMIT = 2
+        try:
+            delattr(self.app, "_pool")
+        except AttributeError:
+            pass
+        self.app.amqp.__dict__.pop("publisher_pool", None)
+        try:
+            pool = self.app.amqp.publisher_pool
+            self.assertEqual(pool.limit, self.app.pool.limit)
+            self.assertTrue(pool._resource.queue)
+
+            p1 = r1 = pool.acquire()
+            p2 = r2 = pool.acquire()
+            delattr(r1.connection, "_producer_chan")
+            r1.release()
+            r2.release()
+            r1 = pool.acquire()
+            r2 = pool.acquire()
+            self.assertIs(p2, r1)
+            self.assertIs(p1, r2)
+            r1.release()
+            r2.release()
+        finally:
+            self.app.conf.BROKER_POOL_LIMIT = L

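The new test_PublisherPool cases exercise the publisher pool added to app.amqp: with BROKER_POOL_LIMIT set, released publishers are recycled (hence the assertIs identity checks), while a None limit yields an unbounded pool. Roughly the pattern under test, assuming a configured app and broker:

.. code-block:: python

    from celery import Celery

    app = Celery(set_as_current=False)
    app.conf.BROKER_POOL_LIMIT = 2          # bounded pool: publishers are reused

    publisher = app.amqp.publisher_pool.acquire(block=True)
    try:
        publisher.delay_task("tasks.add", (2, 2), {})
    finally:
        publisher.release()                 # hand it back for the next caller
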
+ 36 - 0
celery/tests/test_app/test_app_defaults.py

@@ -0,0 +1,36 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import sys
+
+from importlib import import_module
+
+from celery.tests.utils import unittest, pypy_version, sys_platform
+
+
+class test_defaults(unittest.TestCase):
+
+    def setUp(self):
+        self._prev = sys.modules.pop("celery.app.defaults", None)
+
+    def tearDown(self):
+        if self._prev:
+            sys.modules["celery.app.defaults"] = self._prev
+
+    def test_default_pool_pypy_14(self):
+        with sys_platform("darwin"):
+            with pypy_version((1, 4, 0)):
+                self.assertEqual(self.defaults.DEFAULT_POOL, "solo")
+
+    def test_default_pool_pypy_15(self):
+        with sys_platform("darwin"):
+            with pypy_version((1, 5, 0)):
+                self.assertEqual(self.defaults.DEFAULT_POOL, "processes")
+
+    def test_default_pool_jython(self):
+        with sys_platform("java 1.6.51"):
+            self.assertEqual(self.defaults.DEFAULT_POOL, "threads")
+
+    @property
+    def defaults(self):
+        return import_module("celery.app.defaults")

+ 16 - 15
celery/tests/test_app/test_beat.py

@@ -1,8 +1,8 @@
+from __future__ import absolute_import
+
 import logging
-from celery.tests.utils import unittest

 from datetime import datetime, timedelta
-
 from nose import SkipTest

 from celery import beat
@@ -10,7 +10,8 @@ from celery import registry
 from celery.result import AsyncResult
 from celery.schedules import schedule
 from celery.task.base import Task
-from celery.utils import gen_unique_id
+from celery.utils import uuid
+from celery.tests.utils import unittest


 class Object(object):
@@ -104,7 +105,7 @@ class MockLogger(logging.Logger):
         logging.Logger.__init__(self, *args, **kwargs)

     def _log(self, level, msg, args, **kwargs):
-        self.logged.append((level, msg))
+        self.logged.append((level, msg, args, kwargs))


 class mScheduler(beat.Scheduler):
@@ -119,7 +120,7 @@ class mScheduler(beat.Scheduler):
                           "args": args,
                           "kwargs": kwargs,
                           "options": options})
-        return AsyncResult(gen_unique_id())
+        return AsyncResult(uuid())


 class mSchedulerSchedulingError(mScheduler):
@@ -189,9 +190,9 @@ class test_Scheduler(unittest.TestCase):
                       schedule=always_due)
         self.assertEqual(scheduler.tick(), 1)
         self.assertTrue(scheduler.logger.logged[0])
-        level, msg = scheduler.logger.logged[0]
+        level, msg, args, kwargs = scheduler.logger.logged[0]
         self.assertEqual(level, logging.ERROR)
-        self.assertIn("Couldn't apply scheduled task", msg)
+        self.assertIn("Couldn't apply scheduled task", args[0].args[0])

     def test_due_tick_RuntimeError(self):
         scheduler = mSchedulerRuntimeError()
@@ -262,7 +263,7 @@ class test_Service(unittest.TestCase):
                 if self.tick_raises_exit:
                     raise SystemExit()
                 if self.shutdown_service:
-                    self.shutdown_service._shutdown.set()
+                    self.shutdown_service._is_shutdown.set()
                 return 0.0

         return beat.Service(scheduler_cls=PersistentScheduler), sh
@@ -279,12 +280,12 @@ class test_Service(unittest.TestCase):
         s.sync()
         self.assertTrue(sh.closed)
         self.assertTrue(sh.synced)
-        self.assertTrue(s._stopped.isSet())
+        self.assertTrue(s._is_stopped.isSet())
         s.sync()
         s.stop(wait=False)
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())
         s.stop(wait=True)
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())

         p = s.scheduler._store
         s.scheduler._store = None
@@ -295,25 +296,25 @@ class test_Service(unittest.TestCase):

     def test_start_embedded_process(self):
         s, sh = self.get_service()
-        s._shutdown.set()
+        s._is_shutdown.set()
         s.start(embedded_process=True)

     def test_start_thread(self):
         s, sh = self.get_service()
-        s._shutdown.set()
+        s._is_shutdown.set()
         s.start(embedded_process=False)

     def test_start_tick_raises_exit_error(self):
         s, sh = self.get_service()
         s.scheduler.tick_raises_exit = True
         s.start()
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())

     def test_start_manages_one_tick_before_shutdown(self):
         s, sh = self.get_service()
         s.scheduler.shutdown_service = s
         s.start()
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())


 class test_EmbeddedService(unittest.TestCase):

Some files were not shown because too many files changed in this diff