
Merge branch 'master' into utc

Conflicts:
	celery/beat.py
	celery/tests/test_task/test_task.py
	celery/utils/timeutils.py
	celery/worker/job.py
Ask Solem 13 years ago
commit
d8fd481d51
100 files changed, 2262 insertions(+), 1322 deletions(-)
  1. AUTHORS (+95, -85)
  2. Changelog (+392, -11)
  3. FAQ (+13, -20)
  4. README.rst (+2, -2)
  5. celery/__init__.py (+4, -3)
  6. celery/app/__init__.py (+8, -7)
  7. celery/app/amqp.py (+7, -5)
  8. celery/app/base.py (+13, -25)
  9. celery/app/defaults.py (+40, -9)
  10. celery/app/task/__init__.py (+19, -1)
  11. celery/apps/beat.py (+1, -0)
  12. celery/apps/worker.py (+14, -3)
  13. celery/backends/__init__.py (+11, -13)
  14. celery/backends/amqp.py (+20, -13)
  15. celery/backends/base.py (+22, -9)
  16. celery/backends/cache.py (+16, -5)
  17. celery/backends/cassandra.py (+1, -0)
  18. celery/backends/database.py (+1, -0)
  19. celery/backends/mongodb.py (+2, -0)
  20. celery/backends/pyredis.py (+1, -0)
  21. celery/backends/redis.py (+7, -6)
  22. celery/backends/tyrant.py (+1, -0)
  23. celery/beat.py (+20, -9)
  24. celery/bin/base.py (+24, -6)
  25. celery/bin/camqadm.py (+1, -1)
  26. celery/bin/celerybeat.py (+13, -6)
  27. celery/bin/celeryctl.py (+8, -3)
  28. celery/bin/celeryd.py (+3, -2)
  29. celery/bin/celeryd_detach.py (+1, -0)
  30. celery/bin/celeryd_multi.py (+4, -2)
  31. celery/bin/celeryev.py (+10, -3)
  32. celery/concurrency/__init__.py (+3, -2)
  33. celery/concurrency/base.py (+10, -4)
  34. celery/concurrency/eventlet.py (+1, -0)
  35. celery/concurrency/gevent.py (+2, -0)
  36. celery/concurrency/processes/__init__.py (+1, -5)
  37. celery/concurrency/processes/_win.py (+3, -2)
  38. celery/concurrency/processes/pool.py (+57, -19)
  39. celery/concurrency/solo.py (+1, -0)
  40. celery/concurrency/threads.py (+12, -0)
  41. celery/conf.py (+1, -0)
  42. celery/contrib/abortable.py (+3, -0)
  43. celery/contrib/batches.py (+28, -2)
  44. celery/contrib/rdb.py (+2, -0)
  45. celery/datastructures.py (+115, -54)
  46. celery/db/a805d4bd.py (+3, -0)
  47. celery/db/dfd042c7.py (+3, -0)
  48. celery/db/models.py (+1, -0)
  49. celery/db/session.py (+3, -0)
  50. celery/decorators.py (+2, -1)
  51. celery/events/__init__.py (+13, -0)
  52. celery/events/cursesmon.py (+11, -0)
  53. celery/events/dumper.py (+15, -3)
  54. celery/events/snapshot.py (+15, -0)
  55. celery/events/state.py (+38, -10)
  56. celery/exceptions.py (+21, -0)
  57. celery/execute/__init__.py (+1, -0)
  58. celery/execute/trace.py (+15, -2)
  59. celery/loaders/__init__.py (+12, -2)
  60. celery/loaders/app.py (+11, -0)
  61. celery/loaders/base.py (+20, -1)
  62. celery/loaders/default.py (+11, -0)
  63. celery/local.py (+23, -3)
  64. celery/log.py (+28, -23)
  65. celery/messaging.py (+11, -6)
  66. celery/platforms.py (+180, -26)
  67. celery/registry.py (+11, -1)
  68. celery/result.py (+21, -22)
  69. celery/routes.py (+12, -0)
  70. celery/schedules.py (+74, -59)
  71. celery/signals.py (+8, -349)
  72. celery/states.py (+2, -5)
  73. celery/task/__init__.py (+16, -8)
  74. celery/task/base.py (+11, -0)
  75. celery/task/chords.py (+20, -8)
  76. celery/task/control.py (+16, -4)
  77. celery/task/http.py (+25, -5)
  78. celery/task/schedules.py (+4, -4)
  79. celery/task/sets.py (+22, -4)
  80. celery/tests/__init__.py (+3, -0)
  81. celery/tests/compat.py (+2, -0)
  82. celery/tests/config.py (+2, -0)
  83. celery/tests/functional/case.py (+2, -0)
  84. celery/tests/functional/tasks.py (+2, -0)
  85. celery/tests/test_app/__init__.py (+324, -0)
  86. celery/tests/test_app/test_app.py (+0, -313)
  87. celery/tests/test_app/test_app_amqp.py (+2, -2)
  88. celery/tests/test_app/test_beat.py (+12, -11)
  89. celery/tests/test_app/test_celery.py (+1, -0)
  90. celery/tests/test_app/test_loaders.py (+30, -15)
  91. celery/tests/test_app/test_log.py (+2, -1)
  92. celery/tests/test_app/test_routes.py (+8, -4)
  93. celery/tests/test_backends/__init__.py (+9, -7)
  94. celery/tests/test_backends/test_amqp.py (+39, -32)
  95. celery/tests/test_backends/test_base.py (+45, -37)
  96. celery/tests/test_backends/test_cache.py (+69, -9)
  97. celery/tests/test_backends/test_database.py (+5, -2)
  98. celery/tests/test_backends/test_pyredis_compat.py (+2, -0)
  99. celery/tests/test_backends/test_redis.py (+3, -3)
  100. celery/tests/test_backends/test_redis_unit.py (+8, -3)

+ 95 - 85
AUTHORS

@@ -1,85 +1,95 @@
-Ordered by date of first contribution:
-  Ask Solem <ask@celeryproject.org>
-  Grégoire Cachet <gregoire@audacy.fr>
-  Vitaly Babiy <vbabiy86@gmail.com>
-  Brian Rosner <brosner@gmail.com>
-  Sean Creeley <sean.creeley@gmail.com>
-  Ben Firshman <ben@firshman.co.uk>
-  Augusto Becciu <augusto@becciu.org>
-  Jonatan Heyman <jonatan@heyman.info>
-  Mark Hellewell <mark.hellewell@gmail.com>
-  Jerzy Kozera <jerzy.kozera@gmail.com>
-  Brad Jasper <bjasper@gmail.com>
-  Wes Winham <winhamwr@gmail.com>
-  Timo Sugliani
-  Michael Elsdoerfer <michael@elsdoerfer.com>
-  Jason Baker <amnorvend@gmail.com>
-  Wes Turner <wes.turner@gmail.com>
-  Maxim Bodyansky <bodyansky@gmail.com>
-  Rune Halvorsen <runefh@gmail.com>
-  Aaron Ross <aaron@wawd.com>
-  Adam Endicott
-  Jesper Noehr <jesper@noehr.org>
-  Mark Stover <stovenator@gmail.com>
-  Andrew Watts <andrewwatts@gmail.com>
-  Felix Berger <bflat1@gmx.net
-  Reza Lotun <rlotun@gmail.com>
-  Mikhail Korobov <kmike84@gmail.com>
-  Jeff Balogh <me@jeffbalogh.org>
-  Patrick Altman <paltman@gmail.com>
-  Vincent Driessen <vincent@datafox.nl>
-  Hari <haridara@gmail.com>
-  Bartosz Ptaszynski
-  Marcin Lulek <info@webreactor.eu>
-  Honza Kral <honza.kral@gmail.com>
-  Jonas Haag <jonas@lophus.org>
-  Armin Ronacher <armin.ronacher@active-4.com>
-  Gunnlaugur Thor Briem <gunnlaugur@gmail.com>
-  Mikhail Gusarov <dottedmag@dottedmag.net>
-  Frédéric Junod <frederic.junod@camptocamp.com>
-  Lukas Linhart <lukas.linhart@centrumholdings.com>
-  Clay Gerrard
-  David Miller <il.livid.dream@gmail.com>
-  Juarez Bochi <jbochi@gmail.com>
-  Noah Kantrowitz <noah@coderanger.net>
-  Gert Van Gool <gertvangool@gmail.com>
-  sdcooke
-  David Cramer <dcramer@gmail.com>
-  Bryan Berg <bryan@mixedmedialabs.com>
-  Piotr Sikora <piotr.sikora@frickle.com>
-  Sam Cooke <sam@mixcloud.com>
-  John Watson <johnw@mahalo.com>
-  Martin Galpin <m@66laps.com>
-  Chris Rose <offby1@offby1.net>
-  Christopher Peplin <peplin@bueda.com>
-  David White <dpwhite2@ncsu.edu>
-  Vladimir Kryachko <vladimir.kryachko@etvnet.com>
-  Simon Josi <simon.josi@atizo.com>
-  jpellerin
-  Norman Richards <orb@nostacktrace.com>
-  Christoph Burgmer <christoph@nwebs.de>
-  Allan Caffee <allan.caffee@gmail.com>
-  Ales Zoulek <ales.zoulek@gmail.com>
-  Roberto Gaiser <gaiser@geekbunker.org>
-  Balachandran C <balachandran.c@gramvaani.org>
-  Kevin Tran <hekevintran@gmail.com>
-  Branko Čibej <brane@apache.org>
-  Jeff Terrace <jterrace@gmail.com>
-  Ryan Petrello <lists@ryanpetrello.com>
-  Marcin Kuźmiński <marcin@python-works.com>
-  Adriano Petrich <petrich@gmail.com>
-  David Strauss <david@davidstrauss.net>
-  David Arthur <mumrah@gmail.com>
-  Miguel Hernandez Martos <enlavin@gmail.com>
-  Jannis Leidel <jannis@leidel.info>
-  Harm Verhagen <harm.verhagen@gmail.com>
-  lookfwd <lookfwd@gmail.com>
-  Mauro Rocco <fireantology@gmail.com>
-  Matthew J Morrison <mattj.morrison@gmail.com>
-  Daniel Watkins <daniel@daniel-watkins.co.uk>
-  Remy Noel <mocramis@gmail.com>
-  Leo Dirac <leo@banyanbranch.com>
-  Greg Haskins <greg@greghaskins.com>
-  Stefan Kjartansson <esteban.supreme@gmail.com>
-  Chris Adams <chris@improbable.org>
-  Steeve Morin <steeve.morin@gmail.com>
+=========
+ AUTHORS
+=========
+:order: sorted
+
+Aaron Ross <aaron@wawd.com>
+Adam Endicott
+Adriano Petrich <petrich@gmail.com>
+Ales Zoulek <ales.zoulek@gmail.com>
+Allan Caffee <allan.caffee@gmail.com>
+Andrew Watts <andrewwatts@gmail.com>
+Armin Ronacher <armin.ronacher@active-4.com>
+Ask Solem <ask@celeryproject.org>
+Augusto Becciu <augusto@becciu.org>
+Balachandran C <balachandran.c@gramvaani.org>
+Bartosz Ptaszynski
+Ben Firshman <ben@firshman.co.uk>
+Brad Jasper <bjasper@gmail.com>
+Branko Čibej <brane@apache.org>
+Brian Rosner <brosner@gmail.com>
+Bryan Berg <bryan@mixedmedialabs.com>
+Chase Seibert <chase.seibert+github@gmail.com>
+Chris Adams <chris@improbable.org>
+Chris Rose <offby1@offby1.net>
+Christoph Burgmer <christoph@nwebs.de>
+Christopher Peplin <peplin@bueda.com>
+Clay Gerrard
+Dan McGee <dan@archlinux.org>
+Daniel Watkins <daniel@daniel-watkins.co.uk>
+David Arthur <mumrah@gmail.com>
+David Cramer <dcramer@gmail.com>
+David Miller <il.livid.dream@gmail.com>
+David Strauss <david@davidstrauss.net>
+David White <dpwhite2@ncsu.edu>
+Felix Berger <bflat1@gmx.net>
+Frédéric Junod <frederic.junod@camptocamp.com>
+Gert Van Gool <gertvangool@gmail.com>
+Greg Haskins <greg@greghaskins.com>
+Grégoire Cachet <gregoire@audacy.fr>
+Gunnlaugur Thor Briem <gunnlaugur@gmail.com>
+Hari <haridara@gmail.com>
+Harm Verhagen <harm.verhagen@gmail.com>
+Honza Kral <honza.kral@gmail.com>
+Jannis Leidel <jannis@leidel.info>
+Jason Baker <amnorvend@gmail.com>
+Jeff Balogh <me@jeffbalogh.org>
+Jeff Terrace <jterrace@gmail.com>
+Jerzy Kozera <jerzy.kozera@gmail.com>
+Jesper Noehr <jesper@noehr.org>
+John Watson <johnw@mahalo.com>
+Jonas Haag <jonas@lophus.org>
+Jonatan Heyman <jonatan@heyman.info>
+Joshua Ginsberg <jag@flowtheory.net>
+Juarez Bochi <jbochi@gmail.com>
+Kevin Tran <hekevintran@gmail.com>
+Kornelijus Survila <kornholijo@gmail.com>
+Leo Dirac <leo@banyanbranch.com>
+Lukas Linhart <lukas.linhart@centrumholdings.com>
+Marcin Kuźmiński <marcin@python-works.com>
+Marcin Lulek <info@webreactor.eu>
+Mark Hellewell <mark.hellewell@gmail.com>
+Mark Stover <stovenator@gmail.com>
+Martin Galpin <m@66laps.com>
+Matthew J Morrison <mattj.morrison@gmail.com>
+Mauro Rocco <fireantology@gmail.com>
+Maxim Bodyansky <bodyansky@gmail.com>
+Michael Elsdoerfer <michael@elsdoerfer.com>
+Miguel Hernandez Martos <enlavin@gmail.com>
+Mikhail Gusarov <dottedmag@dottedmag.net>
+Mikhail Korobov <kmike84@gmail.com>
+Neil Chintomby <neil@mochimedia.com>
+Noah Kantrowitz <noah@coderanger.net>
+Norman Richards <orb@nostacktrace.com>
+Patrick Altman <paltman@gmail.com>
+Piotr Sikora <piotr.sikora@frickle.com>
+Remy Noel <mocramis@gmail.com>
+Reza Lotun <rlotun@gmail.com>
+Roberto Gaiser <gaiser@geekbunker.org>
+Rune Halvorsen <runefh@gmail.com>
+Ryan Petrello <lists@ryanpetrello.com>
+Sam Cooke <sam@mixcloud.com>
+Sean Creeley <sean.creeley@gmail.com>
+Simon Josi <simon.josi@atizo.com>
+Steeve Morin <steeve.morin@gmail.com>
+Stefan Kjartansson <esteban.supreme@gmail.com>
+Timo Sugliani
+Vincent Driessen <vincent@datafox.nl>
+Vitaly Babiy <vbabiy86@gmail.com>
+Vladimir Kryachko <vladimir.kryachko@etvnet.com>
+Wes Turner <wes.turner@gmail.com>
+Wes Winham <winhamwr@gmail.com>
+jpellerin
+kuno <neokuno@gmail.com>
+lookfwd <lookfwd@gmail.com>
+sdcooke

+ 392 - 11
Changelog

@@ -5,6 +5,387 @@
 .. contents::
     :local:
 
+.. _version-2.4.1:
+
+2.4.1
+=====
+:release-date: 2011-11-07 06:00 P.M GMT
+:by: Ask Solem
+
+* celeryctl inspect commands were missing output.
+
+* processes pool: Decrease polling interval for less idle CPU usage.
+
+* processes pool: MaybeEncodingError was not wrapped in ExceptionInfo
+  (Issue #524).
+
+* celeryd: would silence errors occurring after the task consumer started.
+
+* logging: Fixed a bug where unicode in stdout redirected log messages
+  couldn't be written (Issue #522).
+
+.. _version-2.4.0:
+
+2.4.0
+=====
+:release-date: 2011-11-04 04:00 P.M GMT
+:by: Ask Solem
+
+.. _v240-important:
+
+Important Notes
+---------------
+
+* Now supports Python 3.
+
+* Fixed deadlock in worker process handling (Issue #496).
+
+    A deadlock could occur after spawning new child processes because
+    the logging library's mutex was not properly reset after fork.
+
+    The symptom of this bug is that the worker simply
+    stops processing tasks, as none of the worker's child processes
+    are functioning.  There was a greater chance of this bug occurring
+    with ``maxtasksperchild`` or a time-limit enabled.
+
+    This is a workaround for http://bugs.python.org/issue6721#msg140215.
+
+    Be aware that while this fixes the logging library lock,
+    there could still be other locks initialized in the parent
+    process, introduced by custom code.
+
+    Fix contributed by Harm Verhagen.
+
+* AMQP Result backend: Now expires results by default.
+
+    The default expiration value is now taken from the
+    :setting:`CELERY_TASK_RESULT_EXPIRES` setting.
+
+    The old :setting:`CELERY_AMQP_TASK_RESULT_EXPIRES` setting has been
+    deprecated and will be removed in version 3.0.
+
+    Note that this means that the result backend requires RabbitMQ 2.1.0 or
+    higher, and that you have to disable expiration if you are running
+    with an older version.  You can do so by disabling the
+    :setting:`CELERY_TASK_RESULT_EXPIRES` setting::
+
+        CELERY_TASK_RESULT_EXPIRES = None
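+
+    The expiry can also be lowered instead of disabled; an int value is
+    taken as seconds (a sketch, value illustrative)::
+
+        CELERY_TASK_RESULT_EXPIRES = 60 * 60  # keep results for one hour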
+
+* Eventlet: Fixed problem with shutdown (Issue #457).
+
+* Broker transports can now be specified using URLs.
+
+    The broker can now be specified as a URL instead.
+    This URL must have the format::
+
+        transport://user:password@hostname:port/virtual_host
+
+    for example the default broker is written as::
+
+        amqp://guest:guest@localhost:5672//
+
+    The scheme is required, so that the host is identified
+    as a URL and not just a host name.
+    User, password, port and virtual_host are optional and
+    default to the particular transport's default value.
+
+    .. note::
+
+        Note that the path component (virtual_host) always starts with a
+        forward-slash.  This is necessary to distinguish between the virtual
+        host ``''`` (empty) and ``'/'``, which are both acceptable virtual
+        host names.
+
+        A virtual host of ``'/'`` becomes::
+
+            amqp://guest:guest@localhost:5672//
+
+        and a virtual host of ``''`` (empty) becomes::
+
+            amqp://guest:guest@localhost:5672/
+
+        So the leading slash in the path component is **always required**.
+
+    In addition the :setting:`BROKER_URL` setting has been added as an alias
+    to ``BROKER_HOST``.  Any broker setting specified in both the URL and in
+    the configuration will be ignored; if a setting is not provided in the URL
+    then the value from the configuration will be used as default.
+
+    Also, programs now support the :option:`-b|--broker` option to specify
+    a broker URL on the command line::
+
+        $ celeryd -b redis://localhost
+
+        $ celeryctl -b amqp://guest:guest@localhost//e
+
+    The environment variable :envvar:`CELERY_BROKER_URL` can also be used to
+    easily override the default broker used.
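+
+    For illustration, a complete setting using such a URL (host and
+    virtual host are placeholders)::
+
+        BROKER_URL = "amqp://user:password@broker.example.com:5672/myvhost"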
+
+* The deprecated :func:`celery.loaders.setup_loader` function has been removed.
+
+* The :setting:`CELERY_TASK_ERROR_WHITELIST` setting has been replaced
+  by a more flexible approach (Issue #447).
+
+    The error mail sending logic is now available as ``Task.ErrorMail``,
+    with the implementation (for reference) in :mod:`celery.utils.mail`.
+
+    The error mail class can be subclassed to gain complete control
+    over when error messages are sent, thus removing the need for a separate
+    white-list setting.
+
+    The :setting:`CELERY_TASK_ERROR_WHITELIST` setting has been deprecated,
+    and will be removed completely in version 3.0.
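+
+    For illustration, a sketch of the kind of subclass this enables
+    (``send(context, exc)`` is the signature used by
+    ``Task.send_error_email`` elsewhere in this commit)::
+
+        from celery.task import Task
+        from celery.utils.mail import ErrorMail
+
+        class PickyErrorMail(ErrorMail):
+
+            def send(self, context, exc):
+                # only mail a subset of errors
+                if isinstance(exc, RuntimeError):
+                    ErrorMail.send(self, context, exc)
+
+        class MyTask(Task):
+            ErrorMail = PickyErrorMail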
+
+* Additional Deprecations
+
+    The following functions have been deprecated and are scheduled for removal in
+    version 3.0:
+
+    =====================================  ===================================
+    **Old function**                       **Alternative**
+    =====================================  ===================================
+    `celery.loaders.current_loader`        `celery.current_app.loader`
+    `celery.loaders.load_settings`         `celery.current_app.conf`
+    `celery.execute.apply`                 `Task.apply`
+    `celery.execute.apply_async`           `Task.apply_async`
+    `celery.execute.delay_task`            `celery.execute.send_task`
+    =====================================  ===================================
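+
+    For example, porting code off the old helpers (a sketch)::
+
+        from celery import current_app
+
+        loader = current_app.loader   # was: celery.loaders.current_loader()
+        conf = current_app.conf       # was: celery.loaders.load_settings()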
+
+    The following settings have been deprecated and are scheduled for removal
+    in version 3.0:
+
+    =====================================  ===================================
+    **Old setting**                        **Alternative**
+    =====================================  ===================================
+    `CELERYD_LOG_LEVEL`                    ``celeryd --loglevel=``
+    `CELERYD_LOG_FILE`                     ``celeryd --logfile=``
+    `CELERYBEAT_LOG_LEVEL`                 ``celerybeat --loglevel=``
+    `CELERYBEAT_LOG_FILE`                  ``celerybeat --logfile=``
+    `CELERYMON_LOG_LEVEL`                  ``celerymon --loglevel=``
+    `CELERYMON_LOG_FILE`                   ``celerymon --logfile=``
+    =====================================  ===================================
+
+.. _v240-news:
+
+News
+----
+
+* No longer depends on :mod:`pyparsing`.
+
+* Now depends on Kombu 1.4.3.
+
+* CELERY_IMPORTS can now be a scalar value (Issue #485).
+
+    It is too easy to forget to add the comma after the sole element of a
+    tuple, and this is something that often affects newcomers.
+
+    The docs should probably use a list in examples, as using a tuple
+    for this doesn't even make sense.  Nonetheless, there are many
+    tutorials out there using a tuple, and this change should be a help
+    to new users.
+
+    Suggested by jsaxon-cars.
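+
+    For example, all of these forms are now accepted (module name
+    illustrative)::
+
+        CELERY_IMPORTS = ("myapp.tasks", )   # tuple
+        CELERY_IMPORTS = ["myapp.tasks"]     # list
+        CELERY_IMPORTS = "myapp.tasks"       # single string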
+
+* Fixed a memory leak when using the thread pool (Issue #486).
+
+    Contributed by Kornelijus Survila.
+
+* The statedb was not saved at exit.
+
+    This has now been fixed and it should again remember previously
+    revoked tasks when ``--statedb`` is enabled.
+
+* Adds :setting:`EMAIL_USE_TLS` to enable secure SMTP connections
+  (Issue #418).
+
+    Contributed by Stefan Kjartansson.
+
+* Now handles missing fields in task messages as documented in the message
+  format documentation.
+
+    * A missing required field raises :exc:`InvalidTaskError`.
+    * Missing args/kwargs are assumed empty.
+
+    Contributed by Chris Chamberlin.
+
+* Fixed race condition in celery.events.state (celerymon/celeryev)
+  where task info would be removed while iterating over it (Issue #501).
+
+* The Cache, Cassandra, MongoDB, Redis and Tyrant backends now respect
+  the :setting:`CELERY_RESULT_SERIALIZER` setting (Issue #435).
+
+    This means that only the database (django/sqlalchemy) backends
+    currently do not support using custom serializers.
+
+    Contributed by Steeve Morin.
+
+* Logging calls no longer manually format messages, but delegate
+  that to the logging system, so tools like Sentry can work with
+  the messages more easily (Issue #445).
+
+    Contributed by Chris Adams.
+
+* ``celeryd_multi`` now supports a ``stop_verify`` command to wait for
+  processes to shut down.
+
+* Cache backend did not work if the cache key was unicode (Issue #504).
+
+    Fix contributed by Neil Chintomby.
+
+* New setting :setting:`CELERY_RESULT_DB_SHORT_LIVED_SESSIONS` added,
+  which if enabled will disable the caching of SQLAlchemy sessions
+  (Issue #449).
+
+    Contributed by Leo Dirac.
+
+* All result backends now implement ``__reduce__`` so that they can
+  be pickled (Issue #441).
+
+    Fix contributed by Remy Noel.
+
+* celeryd-multi did not work on Windows (Issue #472).
+
+* New-style ``CELERY_REDIS_*`` settings now take precedence over
+  the old ``REDIS_*`` configuration keys (Issue #508).
+
+    Fix contributed by Joshua Ginsberg.
+
+* Generic celerybeat init script no longer sets `bash -e` (Issue #510).
+
+    Fix contributed by Roger Hu.
+
+* Documented that Chords do not work well with redis-server versions
+  before 2.2.
+
+    Contributed by Dan McGee.
+
+* The :setting:`CELERYBEAT_MAX_LOOP_INTERVAL` setting was not respected.
+
+* ``inspect.registered_tasks`` renamed to ``inspect.registered`` for naming
+  consistency.
+
+    The previous name is still available as an alias.
+
+    Contributed by Mher Movsisyan.
+
+* Worker logged the string representation of args and kwargs
+  without safeguards (Issue #480).
+
+* RHEL init script: Changed celeryd startup priority.
+
+    The default start / stop priorities for MySQL on RHEL are
+
+        # chkconfig: - 64 36
+
+    Therefore, if Celery is using a database as a broker / message store, it
+    should be started after the database is up and running, otherwise errors
+    will ensue. This commit changes the priority in the init script to
+
+        # chkconfig: - 85 15
+
+    which are the recommended defaults for third-party applications, and
+    ensure that Celery is started after the database service and shut down
+    before it terminates.
+
+    Contributed by Yury V. Zaytsev.
+
+* KeyValueStoreBackend.get_many did not respect the ``timeout`` argument
+  (Issue #512).
+
+* celerybeat/celeryev's --workdir option did not chdir before
+  configuration was attempted (Issue #506).
+
+* After deprecating Python 2.4 support we can now name modules correctly,
+  since we can make use of absolute imports.
+
+    Therefore the following internal modules have been renamed:
+
+        celery.concurrency.evlet    -> celery.concurrency.eventlet
+        celery.concurrency.evg      -> celery.concurrency.gevent
+
+* AUTHORS file is now sorted alphabetically.
+
+    Also, as you may have noticed the contributors of new features/fixes are
+    now mentioned in the Changelog.
+
+.. _version-2.3.3:
+
+2.3.3
+=====
+:release-date: 2011-09-16 05:00 P.M BST
+:by: Mher Movsisyan
+
+* Monkey patching :attr:`sys.stdout` could result in the worker
+  crashing if the replacing object did not define :meth:`isatty`
+  (Issue #477).
+
+* ``CELERYD`` option in :file:`/etc/default/celeryd` should not
+  be used with generic init scripts.
+
+
+.. _version-2.3.2:
+
+2.3.2
+=====
+:release-date: 2011-10-07 05:00 P.M BST
+
+.. _v232-news:
+
+News
+----
+
+* Improved Contributing guide.
+
+    If you'd like to contribute to Celery you should read this
+    guide: http://ask.github.com/celery/contributing.html
+
+    We are looking for contributors at all skill levels, so don't
+    hesitate!
+
+* Now depends on Kombu 1.3.1.
+
+* ``Task.request`` now contains the current worker host name (Issue #460).
+
+    Available as ``task.request.hostname``.
+
+* It is now easier for app subclasses to extend how they are pickled
+  (see :class:`celery.app.AppPickler`).
+
+.. _v232-fixes:
+
+Fixes
+-----
+
+* `purge/discard_all` was not working correctly (Issue #455).
+
+* The coloring of log messages didn't handle non-ASCII data well
+  (Issue #427).
+
+* [Windows] the multiprocessing pool tried to import ``os.kill``
+  even though this is not available there (Issue #450).
+
+* Fixed a case where the worker could become unresponsive because of tasks
+  exceeding the hard time limit.
+
+* The ``task-sent`` event was missing from the event reference.
+
+* ``ResultSet.iterate`` now returns results as they finish (Issue #459).
+
+    This was not the case previously, even though the documentation
+    states this was the expected behavior.
+
+* Retries will no longer be performed when tasks are called directly
+  (using ``__call__``).
+
+    Instead, the exception passed to ``retry`` will be re-raised.
+
+* Eventlet no longer crashes if autoscale is enabled.
+
+    Growing and shrinking eventlet pools is still not supported.
+
+* py24 target removed from :file:`tox.ini`.
+
+
 .. _version-2.3.1:
 
 2.3.1
@@ -1436,7 +1817,7 @@ News
     :class:`~celery.task.control.inspect`.
 
 
-    Example using celeryctl to start consuming from queue "queue", in 
+    Example using celeryctl to start consuming from queue "queue", in
     exchange "exchange", of type "direct" using binding key "key"::
 
         $ celeryctl inspect add_consumer queue exchange direct key
@@ -3359,7 +3740,7 @@ Fixes
   by rounding to the nearest day/hour.
 
 * Fixed a potential infinite loop in `BaseAsyncResult.__eq__`, although
-  there is no evidence that it has ever been triggered. 
+  there is no evidence that it has ever been triggered.
 
 * celeryd: Now handles messages with encoding problems by acking them and
   emitting an error message.
@@ -3422,7 +3803,7 @@ Fixes
 
 * Execution: `.messaging.TaskPublisher.send_task` now
   incorporates all the functionality apply_async previously did.
-  
+
     Like converting countdowns to eta, so :func:`celery.execute.apply_async` is
     now simply a convenient front-end to
     :meth:`celery.messaging.TaskPublisher.send_task`, using
@@ -3479,7 +3860,7 @@ Fixes
     is revoked even though it's currently being hold because its eta is e.g.
     a week into the future.
 
-* The `task_id` argument is now respected even if the task is executed 
+* The `task_id` argument is now respected even if the task is executed
   eagerly (either using apply, or :setting:`CELERY_ALWAYS_EAGER`).
 
 * The internal queues are now cleared if the connection is reset.
@@ -3751,7 +4132,7 @@ News
     ...                   ([8, 8], {}, {"countdown": 3})])
     >>> ts.run()
 
-* Got a 3x performance gain by setting the prefetch count to four times the 
+* Got a 3x performance gain by setting the prefetch count to four times the
   concurrency, (from an average task round-trip of 0.1s to 0.03s!).
 
     A new setting has been added: :setting:`CELERYD_PREFETCH_MULTIPLIER`, which
@@ -3888,7 +4269,7 @@ Documentation
 :release-date: 2009-11-20 03:40 P.M CEST
 
 * QOS Prefetch count was not applied properly, as it was set for every message
-  received (which apparently behaves like, "receive one more"), instead of only 
+  received (which apparently behaves like, "receive one more"), instead of only
   set when our wanted value changed.
 
 .. _version-0.8.1:
@@ -4065,7 +4446,7 @@ Important changes
 
 * Support for multiple AMQP exchanges and queues.
 
-    This feature misses documentation and tests, so anyone interested 
+    This feature misses documentation and tests, so anyone interested
     is encouraged to improve this situation.
 
 * celeryd now survives a restart of the AMQP server!
@@ -4188,7 +4569,7 @@ News
 
 * Functions/methods with a timeout argument now works correctly.
 
-* New: `celery.strategy.even_time_distribution`: 
+* New: `celery.strategy.even_time_distribution`:
     With an iterator yielding task args, kwargs tuples, evenly distribute
     the processing of its tasks throughout the time window available.
 
@@ -4351,7 +4732,7 @@ News
 *  Only use README as long_description if the file exists so easy_install
    doesn't break.
 
-* `celery.view`: JSON responses now properly set its mime-type. 
+* `celery.view`: JSON responses now properly set its mime-type.
 
 * `apply_async` now has a `connection` keyword argument so you
   can re-use the same AMQP connection if you want to execute
@@ -4537,7 +4918,7 @@ arguments, so be sure to flush your task queue before you upgrade.
   version to 0.2. This is a pre-release.
 
 * `celery.task.mark_as_read()` and `celery.task.mark_as_failure()` has
-  been removed. Use `celery.backends.default_backend.mark_as_read()`, 
+  been removed. Use `celery.backends.default_backend.mark_as_read()`,
   and `celery.backends.default_backend.mark_as_failure()` instead.
 
 .. _version-0.1.15:
@@ -4594,7 +4975,7 @@ arguments, so be sure to flush your task queue before you upgrade.
   happened.  It kind of works like the `multiprocessing.AsyncResult`
   class returned by `multiprocessing.Pool.map_async`.
 
-* Added dmap() and dmap_async(). This works like the 
+* Added dmap() and dmap_async(). This works like the
   `multiprocessing.Pool` versions except they are tasks
   distributed to the celery server. Example:
 

+ 13 - 20
FAQ

@@ -55,12 +55,12 @@ Is Celery dependent on pickle?
 
 **Answer:** No.
 
-Celery can support any serialization scheme and has support for JSON/YAML and
-Pickle by default. And as every task is associated with a content type, you
-can even send one task using pickle, and another using JSON.
+Celery can support any serialization scheme and has built-in support for
+JSON, YAML, Pickle and msgpack. Also, as every task is associated with a
+content type, you can even send one task using pickle, and another using JSON.
 
 The default serialization format is pickle simply because it is
-convenient as it supports sending complex Python objects as task arguments.
+convenient (it supports sending complex Python objects as task arguments).
 
 If you need to communicate with other languages you should change
 to a serialization format that is suitable for that.
@@ -114,7 +114,7 @@ Is Celery multilingual?
 
 **Answer:** Yes.
 
-:mod:`~celery.bin.celeryd` is an implementation of Celery in python. If the
+:mod:`~celery.bin.celeryd` is an implementation of Celery in Python. If the
 language has an AMQP client, there shouldn't be much work to create a worker
 in your language.  A Celery worker is just a program connecting to the broker
 to process messages.
@@ -180,8 +180,8 @@ most systems), it usually contains a message describing the reason.
 
 .. _faq-celeryd-on-freebsd:
 
-Why won't celeryd run on FreeBSD?
----------------------------------
+Does it work on FreeBSD?
+------------------------
 
 **Answer:** The multiprocessing pool requires a working POSIX semaphore
 implementation which isn't enabled in FreeBSD by default. You have to enable
@@ -252,7 +252,7 @@ other error is happening.
 
 .. _faq-periodic-task-does-not-run:
 
-Why won't my Periodic Task run?
+Why won't my periodic task run?
 -------------------------------
 
 **Answer:** See `Why won't my Task run?`_.
@@ -405,6 +405,10 @@ When running with the AMQP result backend, every task result will be sent
 as a message. If you don't collect these results, they will build up and
 RabbitMQ will eventually run out of memory.
 
+Results expire after 1 day by default.  It may be a good idea
+to lower this value by configuring the :setting:`CELERY_TASK_RESULT_EXPIRES`
+setting.
+
 If you don't use the results for a task, make sure you set the
 `ignore_result` option:
 
@@ -417,17 +421,6 @@ If you don't use the results for a task, make sure you set the
     class MyTask(Task):
         ignore_result = True
 
-Results can also be disabled globally using the
-:setting:`CELERY_IGNORE_RESULT` setting.
-
-.. note::
-
-    Celery version 2.1 added support for automatic expiration of
-    AMQP result backend results.
-
-    To use this you need to run RabbitMQ 2.1 or higher and enable
-    the :setting:`CELERY_AMQP_TASK_RESULT_EXPIRES` setting.
-
 .. _faq-use-celery-with-stomp:
 
 Can I use Celery with ActiveMQ/STOMP?
@@ -461,7 +454,7 @@ Tasks
 How can I reuse the same connection when applying tasks?
 --------------------------------------------------------
 
-**Answer**: Yes! See the :setting:`BROKER_POOL_LIMIT` setting.
+**Answer**: See the :setting:`BROKER_POOL_LIMIT` setting.
 This setting will be enabled by default in 3.0.
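+
+For example, to cap the pool at 10 connections (value illustrative)::
+
+    BROKER_POOL_LIMIT = 10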
 
 .. _faq-execute-task-by-name:

+ 2 - 2
README.rst

@@ -4,7 +4,7 @@
 
 .. image:: http://cloud.github.com/downloads/ask/celery/celery_128.png
 
-:Version: 2.3.1
+:Version: 2.4.1
 :Web: http://celeryproject.org/
 :Download: http://pypi.python.org/pypi/celery/
 :Source: http://github.com/ask/celery/
@@ -256,7 +256,7 @@ Mailing list
 ------------
 
 For discussions about the usage, development, and future of celery,
-please join the `celery-users`_ mailing list. 
+please join the `celery-users`_ mailing list.
 
 .. _`celery-users`: http://groups.google.com/group/celery-users/
 

+ 4 - 3
celery/__init__.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """Distributed Task Queue"""
 # :copyright: (c) 2009 - 2011 by Ask Solem.
 # :license:   BSD, see LICENSE for more details.
@@ -7,7 +8,7 @@ from __future__ import absolute_import
 import os
 import sys
 
-VERSION = (2, 3, 1)
+VERSION = (2, 4, 1)
 
 __version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
 __author__ = "Ask Solem"
@@ -26,10 +27,10 @@ def Celery(*args, **kwargs):
     return App(*args, **kwargs)
 
 if not os.environ.get("CELERY_NO_EVAL", False):
-    from .local import LocalProxy
+    from .local import Proxy
 
     def _get_current_app():
         from .app import current_app
         return current_app()
 
-    current_app = LocalProxy(_get_current_app)
+    current_app = Proxy(_get_current_app)

+ 8 - 7
celery/app/__init__.py

@@ -1,11 +1,12 @@
+# -*- coding: utf-8 -*-
 """
-celery.app
-==========
+    celery.app
+    ~~~~~~~~~~
 
-Celery Application.
+    Celery Application.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
 
 """
 
@@ -247,12 +248,12 @@ def _app_or_default_trace(app=None):  # pragma: no cover
     from multiprocessing import current_process
     if app is None:
         if getattr(_tls, "current_app", None):
-            print("-- RETURNING TO CURRENT APP --")
+            print("-- RETURNING TO CURRENT APP --")  # noqa+
             print_stack()
             return _tls.current_app
         if current_process()._name == "MainProcess":
             raise Exception("DEFAULT APP")
-        print("-- RETURNING TO DEFAULT APP --")
+        print("-- RETURNING TO DEFAULT APP --")      # noqa+
         print_stack()
         return default_app
     return app

+ 7 - 5
celery/app/amqp.py

@@ -1,14 +1,16 @@
 # -*- coding: utf-8 -*-
 """
-celery.app.amqp
-===============
+    celery.app.amqp
+    ~~~~~~~~~~~~~~~
 
-AMQ related functionality.
+    AMQ related functionality.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
 
 """
+from __future__ import absolute_import
+
 from datetime import datetime, timedelta
 
 from kombu import BrokerConnection, Exchange

+ 13 - 25
celery/app/base.py

@@ -1,11 +1,12 @@
+# -*- coding: utf-8 -*-
 """
-celery.app.base
-===============
+    celery.app.base
+    ~~~~~~~~~~~~~~~
 
-Application Base Class.
+    Application Base Class.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
 
 """
 from __future__ import absolute_import
@@ -13,7 +14,6 @@ from __future__ import with_statement
 
 import os
 import platform as _platform
-import sys
 
 from contextlib import contextmanager
 from copy import deepcopy
@@ -21,9 +21,10 @@ from functools import wraps
 from threading import Lock
 
 from .. import datastructures
+from .. import platforms
 from ..utils import cached_property, instantiate, lpmerge
 
-from .defaults import DEFAULTS
+from .defaults import DEFAULTS, find_deprecated_settings
 
 import kombu
 if kombu.VERSION < (1, 1, 0):
@@ -36,20 +37,6 @@ settings -> transport:%(transport)s results:%(results)s
 """
 
 
-def pyimplementation():
-    if hasattr(_platform, "python_implementation"):
-        return _platform.python_implementation()
-    elif sys.platform.startswith("java"):
-        return "Jython %s" % (sys.platform, )
-    elif hasattr(sys, "pypy_version_info"):
-        v = ".".join(map(str, sys.pypy_version_info[:3]))
-        if sys.pypy_version_info[3:]:
-            v += "-" + "".join(map(str, sys.pypy_version_info[3:]))
-        return "PyPy %s" % (v, )
-    else:
-        return "CPython"
-
-
 class LamportClock(object):
     """Lamport's logical clock.
 
@@ -130,9 +117,9 @@ class Settings(datastructures.ConfigurationView):
 
 class BaseApp(object):
     """Base class for apps."""
-    SYSTEM = _platform.system()
-    IS_OSX = SYSTEM == "Darwin"
-    IS_WINDOWS = SYSTEM == "Windows"
+    SYSTEM = platforms.SYSTEM
+    IS_OSX = platforms.IS_OSX
+    IS_WINDOWS = platforms.IS_WINDOWS
 
     amqp_cls = "celery.app.amqp.AMQP"
     backend_cls = None
@@ -315,6 +302,7 @@ class BaseApp(object):
 
     def prepare_config(self, c):
         """Prepare configuration before it is merged with the defaults."""
+        find_deprecated_settings(c)
         return c
 
     def mail_admins(self, subject, body, fail_silently=False):
@@ -364,7 +352,7 @@ class BaseApp(object):
         import kombu
         return BUGREPORT_INFO % {"system": _platform.system(),
                                  "arch": _platform.architecture(),
-                                 "py_i": pyimplementation(),
+                                 "py_i": platforms.pyimplementation(),
                                  "celery_v": celery.__version__,
                                  "kombu_v": kombu.__version__,
                                  "py_v": _platform.python_version(),

+ 40 - 9
celery/app/defaults.py

@@ -1,5 +1,19 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.app.defaults
+    ~~~~~~~~~~~~~~~~~~~
+
+    Configuration introspection and defaults.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
 import sys
 
+from collections import deque
 from datetime import timedelta
 
 is_jython = sys.platform.startswith("java")
@@ -32,12 +46,17 @@ def str_to_bool(term, table={"false": False, "no": False, "0": False,
 
 
 class Option(object):
+    alt = None
+    deprecate_by = None
+    remove_by = None
     typemap = dict(string=str, int=int, float=float, any=lambda v: v,
                    bool=str_to_bool, dict=dict, tuple=tuple)
 
     def __init__(self, default=None, *args, **kwargs):
         self.default = default
         self.type = kwargs.get("type") or "string"
+        for attr, value in kwargs.iteritems():
+            setattr(self, attr, value)
 
     def to_python(self, value):
         return self.typemap[self.type](value)
@@ -175,13 +194,25 @@ NAMESPACES = {
 }
 
 
-def _flatten(d, ns=""):
-    acc = []
-    for key, value in d.iteritems():
-        if isinstance(value, dict):
-            acc.extend(_flatten(value, ns=key + '_'))
-        else:
-            acc.append((ns + key, value.default))
-    return acc
+def flatten(d, ns=""):
+    stack = deque([(ns, d)])
+    while stack:
+        name, space = stack.popleft()
+        for key, value in space.iteritems():
+            if isinstance(value, dict):
+                stack.append((name + key + '_', value))
+            else:
+                yield name + key, value
+
+
+def find_deprecated_settings(source):
+    from celery.utils import warn_deprecated
+    for name, opt in flatten(NAMESPACES):
+        if (opt.deprecate_by or opt.remove_by) and getattr(source, name, None):
+            warn_deprecated(description="The %r setting" % (name, ),
+                            deprecation=opt.deprecate_by,
+                            removal=opt.remove_by,
+                            alternative=opt.alt)
+
 
-DEFAULTS = dict(_flatten(NAMESPACES))
+DEFAULTS = dict((key, value.default) for key, value in flatten(NAMESPACES))
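
The new ``flatten`` replaces the recursive ``_flatten`` with an iterative,
queue-based walk over the nested ``NAMESPACES`` dict, yielding
``(SETTING_NAME, Option)`` pairs. A usage sketch (interactive session,
output elided)::

    >>> from celery.app.defaults import flatten, NAMESPACES
    >>> options = dict(flatten(NAMESPACES))
    >>> sorted(options)[:3]   # first few flattened setting names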

+ 19 - 1
celery/app/task/__init__.py

@@ -1,4 +1,15 @@
-# -*- coding: utf-8 -*-"
+# -*- coding: utf-8 -*-
+"""
+    celery.app.task
+    ~~~~~~~~~~~~~~~
+
+    Tasks Implementation.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+
 from __future__ import absolute_import
 
 import sys
@@ -10,6 +21,7 @@ from ...execute.trace import TaskTrace
 from ...registry import tasks, _unpickle_task
 from ...result import EagerResult
 from ...utils import fun_takes_kwargs, mattrgetter, uuid
+from ...utils.mail import ErrorMail
 
 extract_exec_options = mattrgetter("queue", "routing_key",
                                    "exchange", "immediate",
@@ -103,6 +115,7 @@ class BaseTask(object):
     """
     __metaclass__ = TaskType
 
+    ErrorMail = ErrorMail
     MaxRetriesExceededError = MaxRetriesExceededError
 
     #: The application instance associated with this task class.
@@ -661,6 +674,11 @@ class BaseTask(object):
         """
         pass
 
+    def send_error_email(self, context, exc, **kwargs):
+        if self.send_error_emails and not self.disable_error_emails:
+            sender = self.ErrorMail(self, **kwargs)
+            sender.send(context, exc)
+
     def on_success(self, retval, task_id, args, kwargs):
         """Success handler.
 

+ 1 - 0
celery/apps/beat.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 import atexit

+ 14 - 3
celery/apps/worker.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 import atexit
@@ -14,9 +15,16 @@ import warnings
 from .. import __version__, platforms, signals
 from ..app import app_or_default
 from ..exceptions import ImproperlyConfigured, SystemTerminate
-from ..utils import get_full_cls_name, LOG_LEVELS, cry
+from ..utils import get_full_cls_name, isatty, LOG_LEVELS, cry
 from ..worker import WorkController
 
+try:
+    from greenlet import GreenletExit
+    IGNORE_ERRORS = (GreenletExit, )
+except ImportError:
+    IGNORE_ERRORS = ()
+
+
 BANNER = """
  -------------- celery@%(hostname)s v%(version)s
 ---- **** -----
@@ -98,7 +106,7 @@ class Worker(object):
         if autoscale:
             max_c, _, min_c = autoscale.partition(",")
             self.autoscale = [int(max_c), min_c and int(min_c) or 0]
-        self._isatty = sys.stdout.isatty()
+        self._isatty = isatty(sys.stdout)
 
         self.colored = app.log.colored(self.logfile)
 
@@ -135,7 +143,10 @@ class Worker(object):
               str(self.colored.reset(self.extra_info())))
         self.set_process_status("-active-")
 
-        self.run_worker()
+        try:
+            self.run_worker()
+        except IGNORE_ERRORS:
+            pass
 
     def on_consumer_ready(self, consumer):
         signals.worker_ready.send(sender=consumer)

+ 11 - 13
celery/backends/__init__.py

@@ -1,8 +1,10 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 from .. import current_app
-from ..local import LocalProxy
+from ..local import Proxy
 from ..utils import get_cls_by_name
+from ..utils.functional import memoize
 
 BACKEND_ALIASES = {
     "amqp": "celery.backends.amqp.AMQPBackend",
@@ -15,23 +17,19 @@ BACKEND_ALIASES = {
     "disabled": "celery.backends.base.DisabledBackend",
 }
 
-_backend_cache = {}
-
 
+@memoize(100)
 def get_backend_cls(backend=None, loader=None):
     """Get backend class by name/alias"""
     backend = backend or "disabled"
     loader = loader or current_app.loader
-    if backend not in _backend_cache:
-        aliases = dict(BACKEND_ALIASES, **loader.override_backends)
-        try:
-            _backend_cache[backend] = get_cls_by_name(backend, aliases)
-        except ValueError, exc:
-            raise ValueError("Unknown result backend: %r.  "
-                             "Did you spell it correctly?  (%s)" % (backend,
-                                                                    exc))
-    return _backend_cache[backend]
+    aliases = dict(BACKEND_ALIASES, **loader.override_backends)
+    try:
+        return get_cls_by_name(backend, aliases)
+    except ValueError, exc:
+        raise ValueError("Unknown result backend: %r.  "
+                         "Did you spell it correctly?  (%s)" % (backend, exc))
 
 
 # deprecate this.
-default_backend = LocalProxy(lambda: current_app.backend)
+default_backend = Proxy(lambda: current_app.backend)
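
The hand-rolled ``_backend_cache`` dict is replaced by the ``memoize``
decorator from ``celery.utils.functional``, which bounds the cache size
declaratively. The same pattern, sketched on a toy function (helper name
hypothetical)::

    from celery.utils.functional import memoize

    @memoize(100)   # cache up to 100 distinct results
    def lookup(name):
        return expensive_resolve(name)   # hypothetical expensive call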

+ 20 - 13
celery/backends/amqp.py

@@ -37,9 +37,11 @@ class AMQPBackend(BaseDictBackend):
 
     BacklogLimitExceeded = BacklogLimitExceeded
 
+    supports_native_join = True
+
     def __init__(self, connection=None, exchange=None, exchange_type=None,
             persistent=None, serializer=None, auto_delete=True,
-            expires=None, **kwargs):
+            **kwargs):
         super(AMQPBackend, self).__init__(**kwargs)
         conf = self.app.conf
         self._connection = connection
@@ -56,11 +58,19 @@ class AMQPBackend(BaseDictBackend):
                                       auto_delete=auto_delete)
         self.serializer = serializer or conf.CELERY_RESULT_SERIALIZER
         self.auto_delete = auto_delete
-        self.expires = (conf.CELERY_AMQP_TASK_RESULT_EXPIRES if expires is None
-                                                             else expires)
-        if self.expires is not None:
-            self.expires = self.prepare_expires(self.expires)
-            # x-expires requires RabbitMQ 2.1.0 or higher.
+
+        # AMQP_TASK_RESULT_EXPIRES setting is deprecated and will be
+        # removed in version 3.0.
+        dexpires = conf.CELERY_AMQP_TASK_RESULT_EXPIRES
+
+        self.expires = None
+        if "expires" in kwargs:
+            if kwargs["expires"] is not None:
+                self.expires = self.prepare_expires(kwargs["expires"])
+        else:
+            self.expires = self.prepare_expires(dexpires)
+
+        if self.expires:
             self.queue_arguments["x-expires"] = int(self.expires * 1000)
         self.mutex = threading.Lock()
 
@@ -84,12 +94,10 @@ class AMQPBackend(BaseDictBackend):
 
     def _publish_result(self, connection, task_id, meta):
         # cache single channel
-        if hasattr(connection, "_result_producer_chan") and \
-                connection._result_producer_chan is not None and \
-                connection._result_producer_chan.connection is not None:
-            channel = connection._result_producer_chan
-        else:
-            channel = connection._result_producer_chan = connection.channel()
+        if connection._default_channel is not None and \
+                connection._default_channel.connection is None:
+            connection.maybe_close_channel(connection._default_channel)
+        channel = connection.default_channel
 
         self._create_producer(task_id, channel).publish(meta)
 
@@ -104,7 +112,6 @@ class AMQPBackend(BaseDictBackend):
             with self.app.pool.acquire(block=True) as conn:
 
                 def errback(error, delay):
-                    conn._result_producer_chan = None
                     print("Couldn't send result for %r: %r. Retry in %rs." % (
                             task_id, error, delay))
 

+ 22 - 9
celery/backends/base.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """celery.backends.base"""
 from __future__ import absolute_import
 
@@ -9,14 +10,16 @@ from datetime import timedelta
 from kombu import serialization
 
 from .. import states
-from ..datastructures import LocalCache
+from ..datastructures import LRUCache
 from ..exceptions import TimeoutError, TaskRevokedError
 from ..utils import timeutils
+from ..utils.encoding import from_utf8
 from ..utils.serialization import (get_pickled_exception,
                                    get_pickleable_exception,
                                    create_exception_cls)
 
 EXCEPTION_ABLE_CODECS = frozenset(["pickle", "yaml"])
+is_py3k = sys.version_info >= (3, 0)
 
 
 def unpickle_backend(cls, args, kwargs):
@@ -37,6 +40,9 @@ class BaseBackend(object):
     #: argument which is for each pass.
     subpolling_interval = None
 
+    #: If true the backend must implement :meth:`get_many`.
+    supports_native_join = False
+
     def __init__(self, *args, **kwargs):
         from ..app import app_or_default
         self.app = app_or_default(kwargs.get("app"))
@@ -51,7 +57,8 @@ class BaseBackend(object):
         return payload
 
     def decode(self, payload):
-        return serialization.decode(str(payload),
+        payload = is_py3k and payload or str(payload)
+        return serialization.decode(payload,
                                     content_type=self.content_type,
                                     content_encoding=self.content_encoding)
 
@@ -108,7 +115,7 @@ class BaseBackend(object):
         """Convert serialized exception to Python exception."""
         if self.serializer in EXCEPTION_ABLE_CODECS:
             return get_pickled_exception(exc)
-        return create_exception_cls(exc["exc_type"].encode("utf-8"),
+        return create_exception_cls(from_utf8(exc["exc_type"]),
                                     sys.modules[__name__])
 
     def prepare_value(self, result):
@@ -199,8 +206,9 @@ class BaseBackend(object):
     def on_chord_part_return(self, task):
         pass
 
-    def on_chord_apply(self, setid, body, *args, **kwargs):
+    def on_chord_apply(self, setid, body, result=None, **kwargs):
         from ..registry import tasks
+        kwargs["result"] = [r.task_id for r in result]
         tasks["celery.chord_unlock"].apply_async((setid, body, ), kwargs,
                                                  countdown=1)
 
@@ -212,7 +220,7 @@ class BaseDictBackend(BaseBackend):
 
     def __init__(self, *args, **kwargs):
         super(BaseDictBackend, self).__init__(*args, **kwargs)
-        self._cache = LocalCache(limit=kwargs.get("max_cached_results") or
+        self._cache = LRUCache(limit=kwargs.get("max_cached_results") or
                                  self.app.conf.CELERY_MAX_CACHED_RESULTS)
 
     def store_result(self, task_id, result, status, traceback=None, **kwargs):
@@ -245,11 +253,11 @@ class BaseDictBackend(BaseBackend):
             return meta["result"]
 
     def get_task_meta(self, task_id, cache=True):
-        if cache and task_id in self._cache:
+        if cache:
             try:
                 return self._cache[task_id]
             except KeyError:
-                pass   # backend emptied in the meantime
+                pass
 
         meta = self._get_task_meta_for(task_id)
         if cache and meta.get("status") == states.SUCCESS:
@@ -264,11 +272,11 @@ class BaseDictBackend(BaseBackend):
                                                         cache=False)
 
     def get_taskset_meta(self, taskset_id, cache=True):
-        if cache and taskset_id in self._cache:
+        if cache:
             try:
                 return self._cache[taskset_id]
             except KeyError:
-                pass  # backend emptied in the meantime
+                pass
 
         meta = self._restore_taskset(taskset_id)
         if cache and meta is not None:
@@ -346,6 +354,7 @@ class KeyValueStoreBackend(BaseDictBackend):
                     cached_ids.add(task_id)
 
         ids ^= cached_ids
+        iterations = 0
         while ids:
             keys = list(ids)
             r = self._mget_to_results(self.mget([self.get_key_for_task(k)
@@ -354,7 +363,10 @@ class KeyValueStoreBackend(BaseDictBackend):
             ids ^= set(r.keys())
             for key, value in r.iteritems():
                 yield key, value
+            if timeout and iterations * interval >= timeout:
+                raise TimeoutError("Operation timed out (%s)" % (timeout, ))
             time.sleep(interval)  # don't busy loop.
+            iterations += 1
 
     def _forget(self, task_id):
         self.delete(self.get_key_for_task(task_id))
@@ -387,6 +399,7 @@ class KeyValueStoreBackend(BaseDictBackend):
 
 
 class DisabledBackend(BaseBackend):
+    _cache = {}   # need this attribute to reset cache in tests.
 
     def store_result(self, *args, **kwargs):
         pass
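
With the timeout now honored (Issue #512), callers can bound how long
``get_many`` blocks; it yields ``(task_id, meta)`` pairs as results
arrive. A usage sketch (``backend`` and ``task_ids`` assumed to exist)::

    for task_id, meta in backend.get_many(task_ids, timeout=10.0):
        print(task_id, meta["status"])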

+ 16 - 5
celery/backends/cache.py

@@ -1,8 +1,10 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
-from ..datastructures import LocalCache
+from ..datastructures import LRUCache
 from ..exceptions import ImproperlyConfigured
 from ..utils import cached_property
+from ..utils.encoding import ensure_bytes
 
 from .base import KeyValueStoreBackend
 
@@ -38,7 +40,7 @@ def get_best_memcache(*args, **kwargs):
 class DummyClient(object):
 
     def __init__(self, *args, **kwargs):
-        self.cache = LocalCache(5000)
+        self.cache = LRUCache(limit=5000)
 
     def get(self, key, *args, **kwargs):
         return self.cache.get(key)
@@ -61,6 +63,8 @@ backends = {"memcache": lambda: get_best_memcache,
 
 
 class CacheBackend(KeyValueStoreBackend):
+    servers = None
+    supports_native_join = True
 
     def __init__(self, expires=None, backend=None, options={}, **kwargs):
         super(CacheBackend, self).__init__(self, **kwargs)
@@ -68,10 +72,11 @@ class CacheBackend(KeyValueStoreBackend):
         self.options = dict(self.app.conf.CELERY_CACHE_BACKEND_OPTIONS,
                             **options)
 
-        backend = backend or self.app.conf.CELERY_CACHE_BACKEND
-        self.backend, _, servers = backend.partition("://")
+        self.backend = backend or self.app.conf.CELERY_CACHE_BACKEND
+        if self.backend:
+            self.backend, _, servers = self.backend.partition("://")
+            self.servers = servers.rstrip('/').split(";")
         self.expires = self.prepare_expires(expires, type=int)
-        self.servers = servers.rstrip('/').split(";")
         try:
             self.Client = backends[self.backend]()
         except KeyError:
@@ -80,6 +85,12 @@ class CacheBackend(KeyValueStoreBackend):
                     "following backends: %s" % (self.backend,
                                                 ", ".join(backends.keys())))
 
+    def get_key_for_task(self, task_id):
+        return ensure_bytes(self.task_keyprefix) + ensure_bytes(task_id)
+
+    def get_key_for_taskset(self, taskset_id):
+        return ensure_bytes(self.taskset_keyprefix) + ensure_bytes(taskset_id)
+
     def get(self, key):
         return self.client.get(key)
 

+ 1 - 0
celery/backends/cassandra.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """celery.backends.cassandra"""
 from __future__ import absolute_import
 

+ 1 - 0
celery/backends/database.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 from datetime import datetime

+ 2 - 0
celery/backends/mongodb.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """MongoDB backend for celery."""
 from __future__ import absolute_import
 
@@ -14,6 +15,7 @@ from ..utils.timeutils import maybe_timedelta
 
 from .base import BaseDictBackend
 
+
 class Bunch:
 
     def __init__(self, **kw):

+ 1 - 0
celery/backends/pyredis.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 This is here for backwards compatibility only.
 

+ 7 - 6
celery/backends/redis.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 from ..exceptions import ImproperlyConfigured
@@ -31,6 +32,8 @@ class RedisBackend(KeyValueStoreBackend):
     #: default Redis password (:const:`None`)
     password = None
 
+    supports_native_join = True
+
     def __init__(self, host=None, port=None, db=None, password=None,
             expires=None, **kwargs):
         super(RedisBackend, self).__init__(**kwargs)
@@ -40,9 +43,9 @@ class RedisBackend(KeyValueStoreBackend):
                     "You need to install the redis library in order to use "
                   + "Redis result store backend.")
 
-        # For compatability with the old REDIS_* configuration keys.
+        # For compatibility with the old REDIS_* configuration keys.
         def _get(key):
-            for prefix in "REDIS_%s", "CELERY_REDIS_%s":
+            for prefix in "CELERY_REDIS_%s", "REDIS_%s":
                 try:
                     return conf[prefix % key]
                 except KeyError:
@@ -69,10 +72,8 @@ class RedisBackend(KeyValueStoreBackend):
     def delete(self, key):
         self.client.delete(key)
 
-    def process_cleanup(self):
-        pass
-
-    def on_chord_apply(self, *args, **kwargs):
+    def on_chord_apply(self, setid, body, result=None, **kwargs):
+        self.app.TaskSetResult(setid, result).save()
         pass
 
     def on_chord_part_return(self, task, propagate=False,

+ 1 - 0
celery/backends/tyrant.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """celery.backends.tyrant"""
 from __future__ import absolute_import
 

+ 20 - 9
celery/beat.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.beat
+    ~~~~~~~~~~~
+
+    The Celery periodic task scheduler.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 import errno
@@ -79,13 +90,13 @@ class ScheduleEntry(object):
     def _default_now(self):
         return datetime.utcnow()
 
-    def next(self, last_run_at=None):
+    def _next_instance(self, last_run_at=None):
         """Returns a new instance of the same class, but with
         its date and count fields updated."""
         return self.__class__(**dict(self,
                                 last_run_at=last_run_at or datetime.utcnow(),
                                 total_run_count=self.total_run_count + 1))
-    __next__ = next  # for 2to3
+    __next__ = next = _next_instance  # for 2to3
 
     def update(self, other):
         """Update values from another entry.
@@ -372,8 +383,8 @@ class Service(object):
         self.schedule_filename = schedule_filename or \
                                     app.conf.CELERYBEAT_SCHEDULE_FILENAME
 
-        self._shutdown = threading.Event()
-        self._stopped = threading.Event()
+        self._is_shutdown = threading.Event()
+        self._is_stopped = threading.Event()
         self.debug = SilenceRepeated(self.logger.debug,
                         10 if self.max_interval < 60 else 1)
 
@@ -388,24 +399,24 @@ class Service(object):
             platforms.set_process_title("celerybeat")
 
         try:
-            while not self._shutdown.isSet():
+            while not self._is_shutdown.isSet():
                 interval = self.scheduler.tick()
                 self.debug("Celerybeat: Waking up %s." % (
                         humanize_seconds(interval, prefix="in ")))
                 time.sleep(interval)
         except (KeyboardInterrupt, SystemExit):
-            self._shutdown.set()
+            self._is_shutdown.set()
         finally:
             self.sync()
 
     def sync(self):
         self.scheduler.close()
-        self._stopped.set()
+        self._is_stopped.set()
 
     def stop(self, wait=False):
         self.logger.info("Celerybeat: Shutting down...")
-        self._shutdown.set()
-        wait and self._stopped.wait()  # block until shutdown done.
+        self._is_shutdown.set()
+        wait and self._is_stopped.wait()  # block until shutdown done.
 
     def get_scheduler(self, lazy=False):
         filename = self.schedule_filename

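The renamed `_is_shutdown`/`_is_stopped` events implement a two-flag shutdown handshake: one event requests shutdown, the other confirms that the run loop has actually exited, which is what `stop(wait=True)` blocks on. A simplified, self-contained sketch of the pattern (the scheduler tick is stubbed out):

.. code-block:: python

    import threading
    import time

    class Service(object):

        def __init__(self):
            self._is_shutdown = threading.Event()  # shutdown requested
            self._is_stopped = threading.Event()   # run loop has exited

        def start(self):
            try:
                while not self._is_shutdown.isSet():
                    time.sleep(0.01)    # stand-in for scheduler.tick()
            finally:
                self._is_stopped.set()  # unblocks stop(wait=True)

        def stop(self, wait=False):
            self._is_shutdown.set()
            wait and self._is_stopped.wait()  # block until shutdown done.

    service = Service()
    t = threading.Thread(target=service.start)
    t.start()
    service.stop(wait=True)
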
+ 24 - 6
celery/bin/base.py

@@ -1,11 +1,19 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 import os
 import sys
+import warnings
 
 from optparse import OptionParser, make_option as Option
 
 from .. import __version__, Celery
+from ..exceptions import CDeprecationWarning, CPendingDeprecationWarning
+
+
+# always enable DeprecationWarnings, so our users can see them.
+for warning in (CDeprecationWarning, CPendingDeprecationWarning):
+    warnings.simplefilter("once", warning, 0)
 
 
 class Command(object):
@@ -100,7 +108,7 @@ class Command(object):
         options, args = self.parse_options(prog_name, argv)
         if not self.supports_args and args:
             sys.stderr.write(
-                "\nUnrecognized command line arguments: %r\n" % (
+                "\nUnrecognized command line arguments: %s\n" % (
                     ", ".join(args), ))
             sys.stderr.write("\nTry --help?\n")
             sys.exit(1)
@@ -124,18 +132,28 @@ class Command(object):
                            option_list=(self.preload_options +
                                         self.get_options()))
 
+    def prepare_preload_options(self, options):
+        """Optional handler to do additional processing of preload options.
+
+        This is called before the configuration is initialized,
+        so the configuration must not be loaded until after
+        this handler has been called.
+
+        """
+        pass
+
     def setup_app_from_commandline(self, argv):
         preload_options = self.parse_preload_options(argv)
-        app = (preload_options.pop("app", None) or
+        self.prepare_preload_options(preload_options)
+        app = (preload_options.get("app") or
                os.environ.get("CELERY_APP") or
                self.app)
-        loader = (preload_options.pop("loader", None) or
+        loader = (preload_options.get("loader") or
                   os.environ.get("CELERY_LOADER") or
                   "default")
+        broker = preload_options.get("broker")
+        broker = preload_options.get("broker", None)
         if broker:
             os.environ["CELERY_BROKER_URL"] = broker
-        config_module = preload_options.pop("config_module", None)
+        config_module = preload_options.get("config_module")
         if config_module:
             os.environ["CELERY_CONFIG_MODULE"] = config_module
         if app:
@@ -186,7 +204,7 @@ class Command(object):
         return Celery(*args, **kwargs)
 
 
-def daemon_options(default_pidfile, default_logfile=None):
+def daemon_options(default_pidfile=None, default_logfile=None):
     return (
         Option('-f', '--logfile', default=default_logfile,
                action="store", dest="logfile",

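`prepare_preload_options` gives subclasses a hook that runs after the preload options are parsed but before the app and its configuration are set up. A sketch of a subclass using it to honour a working-directory option early (mirroring what the beat and ev commands below do; the option name is taken from this changeset):

.. code-block:: python

    import os

    from celery.bin.base import Command, daemon_options

    class MyDaemonCommand(Command):
        # daemon_options() can now be used without a default pidfile.
        preload_options = Command.preload_options + daemon_options()

        def prepare_preload_options(self, options):
            # Runs before the configuration is loaded, so relative
            # paths in the config resolve against the new directory.
            workdir = options.get("working_directory")
            if workdir:
                os.chdir(workdir)
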
+ 1 - 1
celery/bin/camqadm.py

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# -*- coding: utf-8 -*-
 """camqadm
 
 .. program:: camqadm

+ 13 - 6
celery/bin/celerybeat.py

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# -*- coding: utf-8 -*-
 """celerybeat
 
 .. program:: celerybeat
@@ -25,6 +25,8 @@
 from __future__ import with_statement
 from __future__ import absolute_import
 
+import os
+
 from functools import partial
 
 from ..platforms import detached
@@ -34,13 +36,15 @@ from .base import Command, Option, daemon_options
 
 class BeatCommand(Command):
     supports_args = False
+    preload_options = (Command.preload_options
+                     + daemon_options(default_pidfile="celerybeat.pid"))
 
     def run(self, detach=False, logfile=None, pidfile=None, uid=None,
             gid=None, umask=None, working_directory=None, **kwargs):
+        workdir = working_directory
         kwargs.pop("app", None)
         beat = partial(self.app.Beat,
                        logfile=logfile, pidfile=pidfile, **kwargs)
-        workdir = working_directory
 
         if detach:
             with detached(logfile, pidfile, uid, gid, umask, workdir):
@@ -48,6 +52,11 @@ class BeatCommand(Command):
         else:
             return beat().run()
 
+    def prepare_preload_options(self, options):
+        workdir = options.get("working_directory")
+        if workdir:
+            os.chdir(workdir)
+
     def get_options(self):
         conf = self.app.conf
 
@@ -62,7 +71,7 @@ class BeatCommand(Command):
                     "'.db' will be appended to the filename. Default: %s" % (
                             conf.CELERYBEAT_SCHEDULE_FILENAME, )),
             Option('--max-interval',
-                default=3600.0, type="float", dest="max_interval",
+                default=None, type="float", dest="max_interval",
                 help="Max. seconds to sleep between schedule iterations."),
             Option('-S', '--scheduler',
                 default=None,
@@ -72,9 +81,7 @@ class BeatCommand(Command):
             Option('-l', '--loglevel',
                 default=conf.CELERYBEAT_LOG_LEVEL,
                 action="store", dest="loglevel",
-                help="Loglevel. One of DEBUG/INFO/WARNING/ERROR/CRITICAL."),
-        ) + daemon_options(default_pidfile="celerybeat.pid",
-                           default_logfile=conf.CELERYBEAT_LOG_FILE)
+                help="Loglevel. One of DEBUG/INFO/WARNING/ERROR/CRITICAL."))
 
 
 def main():

+ 8 - 3
celery/bin/celeryctl.py

@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
@@ -220,7 +222,8 @@ class inspect(Command):
                "reserved": 1.0,
                "stats": 1.0,
                "revoked": 1.0,
-               "registered_tasks": 1.0,
+               "registered_tasks": 1.0,  # alias to registered
+               "registered": 1.0,
                "enable_events": 1.0,
                "disable_events": 1.0,
                "ping": 0.2,
@@ -232,6 +235,7 @@ class inspect(Command):
                     help="Timeout in seconds (float) waiting for reply"),
                 Option("--destination", "-d", dest="destination",
                     help="Comma separated list of destination node names."))
+    show_body = True
 
     def usage(self, command):
         return "%%prog %s [options] %s [%s]" % (
@@ -239,6 +243,7 @@ class inspect(Command):
 
     def run(self, *args, **kwargs):
         self.quiet = kwargs.get("quiet", False)
+        self.show_body = kwargs.get("show_body", True)
         if not args:
             raise Error("Missing inspect command. See --help")
         command = args[0]
@@ -274,7 +279,7 @@ class inspect(Command):
             return
         dirstr = not self.quiet and c.bold(c.white(direction), " ") or ""
         self.out(c.reset(dirstr, title))
-        if body and not self.quiet:
+        if body and self.show_body:
             self.out(body)
 inspect = command(inspect)
 
@@ -290,7 +295,7 @@ class status(Command):
     def run(self, *args, **kwargs):
         replies = inspect(app=self.app,
                           no_color=kwargs.get("no_color", False)) \
-                    .run("ping", **dict(kwargs, quiet=True))
+                    .run("ping", **dict(kwargs, quiet=True, show_body=False))
         if not replies:
             raise Error("No nodes replied within time constraint")
         nodecount = len(replies)

+ 3 - 2
celery/bin/celeryd.py

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# -*- coding: utf-8 -*-
 """celeryd
 
 .. program:: celeryd
@@ -108,7 +108,8 @@ class WorkerCommand(Command):
                 default=conf.CELERYD_POOL,
                 action="store", dest="pool", type="str",
                 help="Pool implementation: "
-                     "processes (default), eventlet or gevent."),
+                     "processes (default), eventlet, gevent, "
+                     "solo or threads."),
             Option('--purge', '--discard', default=False,
                 action="store_true", dest="discard",
                 help="Discard all waiting tasks before the server is"

+ 1 - 0
celery/bin/celeryd_detach.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 from __future__ import with_statement
 

+ 4 - 2
celery/bin/celeryd_multi.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 
 Examples
@@ -89,7 +90,6 @@ from __future__ import absolute_import
 
 import errno
 import os
-import shlex
 import signal
 import socket
 import sys
@@ -99,7 +99,9 @@ from subprocess import Popen
 from time import sleep
 
 from .. import __version__
+from ..platforms import shellsplit
 from ..utils import term
+from ..utils.encoding import from_utf8
 
 SIGNAMES = set(sig for sig in dir(signal)
                         if sig.startswith("SIG") and "_" not in sig)
@@ -368,7 +370,7 @@ class MultiTool(object):
 
     def waitexec(self, argv, path=sys.executable):
         args = " ".join([path] + list(argv))
-        argstr = shlex.split(args.encode("utf-8"))
+        argstr = shellsplit(from_utf8(args))
         pipe = Popen(argstr, env=self.env)
         self.info("  %s" % " ".join(argstr))
         retcode = pipe.wait()

+ 10 - 3
celery/bin/celeryev.py

@@ -1,6 +1,8 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 from __future__ import with_statement
 
+import os
 import sys
 
 from functools import partial
@@ -13,6 +15,8 @@ from .base import Command, Option, daemon_options
 
 class EvCommand(Command):
     supports_args = False
+    preload_options = (Command.preload_options
+                     + daemon_options(default_pidfile="celeryev.pid"))
 
     def run(self, dump=False, camera=None, frequency=1.0, maxrate=None,
             loglevel="INFO", logfile=None, prog_name="celeryev",
@@ -31,6 +35,11 @@ class EvCommand(Command):
                                   detach=detach)
         return self.run_evtop()
 
+    def prepare_preload_options(self, options):
+        workdir = options.get("working_directory")
+        if workdir:
+            os.chdir(workdir)
+
     def run_evdump(self):
         from ..events.dumper import evdump
         self.set_process_status("dump")
@@ -82,9 +91,7 @@ class EvCommand(Command):
                    help="Recording: Shutter rate limit (e.g. 10/m)"),
             Option('-l', '--loglevel',
                    action="store", dest="loglevel", default="INFO",
-                   help="Loglevel. Default is WARNING."),
-        ) + daemon_options(default_pidfile="celeryev.pid",
-                           default_logfile=None)
+                   help="Loglevel. Default is WARNING."))
 
 
 def main():

+ 3 - 2
celery/concurrency/__init__.py

@@ -1,11 +1,12 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 from ..utils import get_cls_by_name
 
 ALIASES = {
     "processes": "celery.concurrency.processes.TaskPool",
-    "eventlet": "celery.concurrency.evlet.TaskPool",
-    "gevent": "celery.concurrency.evg.TaskPool",
+    "eventlet": "celery.concurrency.eventlet.TaskPool",
+    "gevent": "celery.concurrency.gevent.TaskPool",
     "threads": "celery.concurrency.threads.TaskPool",
     "solo": "celery.concurrency.solo.TaskPool",
 }
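
With the module renames below, the aliases resolve to importable paths again (`evlet`/`evg` were the old abbreviated names). Pool implementations are looked up by alias through `get_cls_by_name`; a quick sketch, assuming its `(name, aliases)` signature:

.. code-block:: python

    from celery.utils import get_cls_by_name

    # "solo" -> celery.concurrency.solo.TaskPool
    TaskPool = get_cls_by_name("solo", ALIASES)
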

+ 10 - 4
celery/concurrency/base.py

@@ -1,5 +1,7 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
+import logging
 import os
 import sys
 import time
@@ -10,6 +12,7 @@ from functools import partial
 from .. import log
 from ..datastructures import ExceptionInfo
 from ..utils import timer2
+from ..utils.encoding import safe_repr
 
 
 def apply_target(target, args=(), kwargs={}, callback=None,
@@ -27,6 +30,7 @@ class BasePool(object):
     Timer = timer2.Timer
 
     signal_safe = True
+    rlimit_safe = True
     is_green = False
 
     _state = None
@@ -37,6 +41,7 @@ class BasePool(object):
         self.putlocks = putlocks
         self.logger = logger or log.get_default_logger()
         self.options = options
+        self.does_debug = self.logger.isEnabledFor(logging.DEBUG)
 
     def on_start(self):
         pass
@@ -82,8 +87,9 @@ class BasePool(object):
         on_ready = partial(self.on_ready, callback, errback)
         on_worker_error = partial(self.on_worker_error, errback)
 
-        self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)",
-                          target, args, kwargs)
+        if self.does_debug:
+            self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)",
+                            target, safe_repr(args), safe_repr(kwargs))
 
         return self.on_apply(target, args, kwargs,
                              callback=on_ready,
@@ -106,8 +112,8 @@ class BasePool(object):
         else:
             self.safe_apply_callback(callback, ret_value)
 
-    def on_worker_error(self, errback, exc):
-        errback(ExceptionInfo((exc.__class__, exc, None)))
+    def on_worker_error(self, errback, exc_info):
+        errback(exc_info)
 
     def safe_apply_callback(self, fun, *args):
         if fun:

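Caching `isEnabledFor(DEBUG)` once in `does_debug` means the worker only pays for `safe_repr()` of potentially huge task arguments when debug logging is actually enabled. The guard pattern in isolation:

.. code-block:: python

    import logging

    logger = logging.getLogger("celery.pool.example")
    does_debug = logger.isEnabledFor(logging.DEBUG)  # computed once

    def apply(target, args, kwargs):
        if does_debug:
            # repr() of large args is only built when DEBUG is on.
            logger.debug("TaskPool: Apply %s (args:%r kwargs:%r)",
                         target, args, kwargs)
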
+ 1 - 0
celery/concurrency/evlet.py → celery/concurrency/eventlet.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 import os

+ 2 - 0
celery/concurrency/evg.py → celery/concurrency/gevent.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 import os
@@ -87,6 +88,7 @@ class TaskPool(BasePool):
     Timer = Timer
 
     signal_safe = False
+    rlimit_safe = False
     is_green = True
 
     def __init__(self, *args, **kwargs):

+ 1 - 5
celery/concurrency/processes/__init__.py

@@ -1,8 +1,4 @@
-"""
-
-Process Pools.
-
-"""
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 import platform

+ 3 - 2
celery/concurrency/processes/_win.py

@@ -1,6 +1,7 @@
-import os
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
 
-__all__ = ["get_processtree_pids", "kill_processtree"]
+import os
 
 # psutil is painfully slow in win32. So to avoid adding big
 # dependencies like pywin32 a ctypes based solution is preferred

+ 57 - 19
celery/concurrency/processes/pool.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 #
 # Module providing the `Pool` class for managing a process pool
 #
@@ -5,8 +6,7 @@
 #
 # Copyright (c) 2007-2008, R Oudkerk --- see COPYING.txt
 #
-
-__all__ = ['Pool']
+from __future__ import absolute_import
 
 #
 # Imports
@@ -22,11 +22,13 @@ import collections
 import time
 import signal
 import warnings
+import logging
 
 from multiprocessing import Process, cpu_count, TimeoutError
 from multiprocessing import util
 from multiprocessing.util import Finalize, debug
 
+from celery.datastructures import ExceptionInfo
 from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
 from celery.exceptions import WorkerLostError
 
@@ -74,16 +76,30 @@ class LaxBoundedSemaphore(threading._Semaphore):
         _Semaphore.__init__(self, value, verbose)
         self._initial_value = value
 
-    def release(self):
-        if self._Semaphore__value < self._initial_value:
-            _Semaphore.release(self)
-        if __debug__:
-            self._note("%s.release: success, value=%s (unchanged)" % (
-                self, self._Semaphore__value))
+    if sys.version_info >= (3, 0):
+
+        def release(self):
+            if self._value < self._initial_value:
+                _Semaphore.release(self)
+            if __debug__:
+                self._note("%s.release: success, value=%s (unchanged)" % (
+                    self, self._value))
+
+        def clear(self):
+            while self._value < self._initial_value:
+                _Semaphore.release(self)
+    else:
+
+        def release(self):  # noqa
+            if self._Semaphore__value < self._initial_value:
+                _Semaphore.release(self)
+            if __debug__:
+                self._note("%s.release: success, value=%s (unchanged)" % (
+                    self, self._Semaphore__value))
 
-    def clear(self):
-        while self._Semaphore__value < self._initial_value:
-            _Semaphore.release(self)
+        def clear(self):  # noqa
+            while self._Semaphore__value < self._initial_value:
+                _Semaphore.release(self)
 
 #
 # Exceptions
@@ -119,6 +135,17 @@ def soft_timeout_sighandler(signum, frame):
 
 
 def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None):
+    # Re-init logging system.
+    # Workaround for http://bugs.python.org/issue6721#msg140215
+    # Python logging module uses RLock() objects which are broken after
+    # fork. This can result in a deadlock (Issue #496).
+    logger_names = logging.Logger.manager.loggerDict.keys()
+    logger_names.append(None)  # for root logger
+    for name in logger_names:
+        for handler in logging.getLogger(name).handlers:
+            handler.createLock()
+    logging._lock = threading.RLock()
+
     pid = os.getpid()
     assert maxtasks is None or (type(maxtasks) == int and maxtasks > 0)
     put = outqueue.put
@@ -166,13 +193,15 @@ def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None):
         put((ACK, (job, i, time.time(), pid)))
         try:
             result = (True, func(*args, **kwds))
-        except Exception, e:
-            result = (False, e)
+        except Exception:
+            result = (False, ExceptionInfo(sys.exc_info()))
         try:
             put((READY, (job, i, result)))
         except Exception, exc:
+            _, _, tb = sys.exc_info()
             wrapped = MaybeEncodingError(exc, result[1])
-            put((READY, (job, i, (False, wrapped))))
+            einfo = ExceptionInfo((MaybeEncodingError, wrapped, tb))
+            put((READY, (job, i, (False, einfo))))
 
         completed += 1
     debug('worker exiting after %d tasks' % completed)
@@ -320,7 +349,12 @@ class TimeoutHandler(PoolThread):
                 return
             debug('hard time limit exceeded for %i', i)
             # Remove from cache and set return value to an exception
-            job._set(i, (False, TimeLimitExceeded(hard_timeout)))
+            exc_info = None
+            try:
+                raise TimeLimitExceeded(hard_timeout)
+            except TimeLimitExceeded:
+                exc_info = sys.exc_info()
+            job._set(i, (False, ExceptionInfo(exc_info)))
 
             # Remove from _pool
             process, _index = _process_by_pid(job._worker_pid)
@@ -408,7 +442,7 @@ class ResultHandler(PoolThread):
         debug('result handler starting')
         while 1:
             try:
-                ready, task = poll(0.2)
+                ready, task = poll(1.0)
             except (IOError, EOFError), exc:
                 debug('result handler got %r -- exiting' % (exc, ))
                 return
@@ -428,7 +462,7 @@ class ResultHandler(PoolThread):
         time_terminate = None
         while cache and self._state != TERMINATE:
             try:
-                ready, task = poll(0.2)
+                ready, task = poll(1.0)
             except (IOError, EOFError), exc:
                 debug('result handler got %r -- exiting' % (exc, ))
                 return
@@ -571,8 +605,12 @@ class Pool(object):
                 if not job.ready() and job._worker_lost]:
             now = now or time.time()
             if now - job._worker_lost > lost_worker_timeout:
-                err = WorkerLostError("Worker exited prematurely.")
-                job._set(None, (False, err))
+                exc_info = None
+                try:
+                    raise WorkerLostError("Worker exited prematurely.")
+                except WorkerLostError:
+                    exc_info = ExceptionInfo(sys.exc_info())
+                job._set(None, (False, exc_info))
 
         if shutdown and not len(self._pool):
             raise WorkersJoined()

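Several of the hunks above replace bare exception *values* with `ExceptionInfo` objects that carry a traceback. Because `TimeLimitExceeded` and `WorkerLostError` are synthesized rather than caught from running code, the pool raises and immediately catches them so that `sys.exc_info()` has a traceback to record. The idiom in isolation (with a local stand-in for the exception class):

.. code-block:: python

    import sys

    class WorkerLostError(Exception):
        pass

    def capture(exc):
        # Raise and catch so sys.exc_info() includes a traceback.
        try:
            raise exc
        except exc.__class__:
            return sys.exc_info()

    exc_type, exc_value, tb = capture(
        WorkerLostError("Worker exited prematurely."))
    assert tb is not None
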
+ 1 - 0
celery/concurrency/solo.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 import os

+ 12 - 0
celery/concurrency/threads.py

@@ -1,8 +1,17 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
+from UserDict import UserDict
+
 from .base import apply_target, BasePool
 
 
+class NullDict(UserDict):
+
+    def __setitem__(self, key, value):
+        pass
+
+
 class TaskPool(BasePool):
 
     def __init__(self, *args, **kwargs):
@@ -17,6 +26,9 @@ class TaskPool(BasePool):
 
     def on_start(self):
         self._pool = self.ThreadPool(self.limit)
+        # threadpool stores all work requests until they are processed
+        # we don't need this dict, and it occupies way too much memory.
+        self._pool.workRequests = NullDict()
 
     def on_stop(self):
         self._pool.dismissWorkers(self.limit, do_join=True)

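`NullDict` works here because celery never polls the third-party `threadpool` instance for results, so the work requests stored in `workRequests` would only accumulate; discarding the writes trades that unused bookkeeping for bounded memory. The behaviour in isolation:

.. code-block:: python

    from UserDict import UserDict  # Python 2

    class NullDict(UserDict):

        def __setitem__(self, key, value):
            pass  # silently discard the value

    requests = NullDict()
    requests["job-1"] = object()
    assert len(requests) == 0   # nothing was retained
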
+ 1 - 0
celery/conf.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 
 **DEPRECATED**

+ 3 - 0
celery/contrib/abortable.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 =========================
 Abortable tasks overview
@@ -78,6 +79,8 @@ have it block until the task is finished.
    database backends.
 
 """
+from __future__ import absolute_import
+
 from celery.task.base import Task
 from celery.result import AsyncResult
 

+ 28 - 2
celery/contrib/batches.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 celery.contrib.batches
 ======================
@@ -39,15 +40,40 @@ Registering the click is done as follows:
 :license: BSD, see LICENSE for more details.
 
 """
+from __future__ import absolute_import
+
 from itertools import count
-from Queue import Queue
+from Queue import Empty, Queue
 
-from celery.datastructures import consume_queue
 from celery.task import Task
 from celery.utils import cached_property, timer2
 from celery.worker import state
 
 
+def consume_queue(queue):
+    """Iterator yielding all immediately available items in a
+    :class:`Queue.Queue`.
+
+    The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
+
+    *Examples*
+
+        >>> q = Queue()
+        >>> map(q.put, range(4))
+        >>> list(consume_queue(q))
+        [0, 1, 2, 3]
+        >>> list(consume_queue(q))
+        []
+
+    """
+    get = queue.get_nowait
+    while 1:
+        try:
+            yield get()
+        except Empty:
+            break
+
+
 def apply_batches_task(task, args, loglevel, logfile):
     task.request.update({"loglevel": loglevel, "logfile": logfile})
     try:

+ 2 - 0
celery/contrib/rdb.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 celery.contrib.rdb
 ==================
@@ -36,6 +37,7 @@ Inspired by http://snippets.dzone.com/posts/show/7248
 :license: BSD, see LICENSE for more details.
 
 """
+from __future__ import absolute_import
 
 import errno
 import os

+ 115 - 54
celery/datastructures.py

@@ -1,24 +1,25 @@
+# -*- coding: utf-8 -*-
 """
-celery.datastructures
-=====================
+    celery.datastructures
+    ~~~~~~~~~~~~~~~~~~~~~
 
-Custom data structures.
+    Custom types and data structures.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
 
 """
 from __future__ import absolute_import
 from __future__ import with_statement
 
+import sys
 import time
 import traceback
 
 from itertools import chain
-from Queue import Empty
 from threading import RLock
 
-from .utils.compat import OrderedDict
+from .utils.compat import UserDict, OrderedDict
 
 
 class AttributeDictMixin(object):
@@ -81,8 +82,16 @@ class DictAttribute(object):
     def __contains__(self, key):
         return hasattr(self.obj, key)
 
-    def iteritems(self):
+    def _iterate_items(self):
         return vars(self.obj).iteritems()
+    iteritems = _iterate_items
+
+    if sys.version_info >= (3, 0):
+        items = _iterate_items
+    else:
+
+        def items(self):
+            return list(self._iterate_items())
 
 
 class ConfigurationView(AttributeDictMixin):
@@ -147,23 +156,55 @@ class ConfigurationView(AttributeDictMixin):
         # changes takes precedence.
         return chain(*[op(d) for d in reversed(self._order)])
 
-    def iterkeys(self):
+    def _iterate_keys(self):
         return self._iter(lambda d: d.iterkeys())
+    iterkeys = _iterate_keys
 
-    def iteritems(self):
+    def _iterate_items(self):
         return self._iter(lambda d: d.iteritems())
+    iteritems = _iterate_items
 
-    def itervalues(self):
+    def _iterate_values(self):
         return self._iter(lambda d: d.itervalues())
+    itervalues = _iterate_values
 
     def keys(self):
-        return list(self.iterkeys())
+        return list(self._iterate_keys())
 
     def items(self):
-        return list(self.iteritems())
+        return list(self._iterate_items())
 
     def values(self):
-        return list(self.itervalues())
+        return list(self._iterate_values())
+
+
+class _Code(object):
+
+    def __init__(self, code):
+        self.co_filename = code.co_filename
+        self.co_name = code.co_name
+
+
+class _Frame(object):
+    Code = _Code
+
+    def __init__(self, frame):
+        self.f_globals = {
+            "__file__": frame.f_globals.get("__file__", "__main__"),
+        }
+        self.f_code = self.Code(frame.f_code)
+
+
+class Traceback(object):
+    Frame = _Frame
+
+    def __init__(self, tb):
+        self.tb_frame = self.Frame(tb.tb_frame)
+        self.tb_lineno = tb.tb_lineno
+        if tb.tb_next is None:
+            self.tb_next = None
+        else:
+            self.tb_next = Traceback(tb.tb_next)
 
 
 class ExceptionInfo(object):
@@ -174,15 +215,21 @@ class ExceptionInfo(object):
 
     """
 
-    #: The original exception.
+    #: Exception type.
+    type = None
+
+    #: Exception instance.
     exception = None
 
-    #: A traceback form the point when :attr:`exception` was raised.
+    #: Pickleable traceback instance for use with :mod:`traceback`
+    tb = None
+
+    #: String representation of the traceback.
     traceback = None
 
     def __init__(self, exc_info):
-        _, exception, _ = exc_info
-        self.exception = exception
+        self.type, self.exception, tb = exc_info
+        self.tb = Traceback(tb)
         self.traceback = ''.join(traceback.format_exception(*exc_info))
 
     def __str__(self):
@@ -191,29 +238,9 @@ class ExceptionInfo(object):
     def __repr__(self):
         return "<ExceptionInfo: %r>" % (self.exception, )
 
-
-def consume_queue(queue):
-    """Iterator yielding all immediately available items in a
-    :class:`Queue.Queue`.
-
-    The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
-
-    *Examples*
-
-        >>> q = Queue()
-        >>> map(q.put, range(4))
-        >>> list(consume_queue(q))
-        [0, 1, 2, 3]
-        >>> list(consume_queue(q))
-        []
-
-    """
-    get = queue.get_nowait
-    while 1:
-        try:
-            yield get()
-        except Empty:
-            break
+    @property
+    def exc_info(self):
+        return self.type, self.exception, self.tb
 
 
 class LimitedSet(object):
@@ -291,27 +318,61 @@ class LimitedSet(object):
         return self.chronologically[0]
 
 
-class LocalCache(OrderedDict):
-    """Dictionary with a finite number of keys.
+class LRUCache(UserDict):
+    """LRU Cache implementation using a doubly linked list to track access.
 
-    Older items expires first.
+    :keyword limit: The maximum number of keys to keep in the cache.
+        When a new key is inserted and the limit has been exceeded,
+        the *Least Recently Used* key will be discarded from the
+        cache.
 
     """
 
     def __init__(self, limit=None):
-        super(LocalCache, self).__init__()
         self.limit = limit
-        self.lock = RLock()
+        self.mutex = RLock()
+        self.data = OrderedDict()
+
+    def __getitem__(self, key):
+        with self.mutex:
+            value = self[key] = self.data.pop(key)
+            return value
+
+    def keys(self):
+        # userdict.keys in py3k calls __getitem__
+        return self.data.keys()
+
+    def values(self):
+        return list(self._iterate_values())
+
+    def items(self):
+        return list(self._iterate_items())
 
     def __setitem__(self, key, value):
-        with self.lock:
-            while len(self) >= self.limit:
-                self.popitem(last=False)
-            super(LocalCache, self).__setitem__(key, value)
-
-    def pop(self, key, *args):
-        with self.lock:
-            super(LocalCache, self).pop(key, *args)
+        # remove least recently used key.
+        with self.mutex:
+            if self.limit and len(self.data) >= self.limit:
+                self.data.pop(iter(self.data).next())
+            self.data[key] = value
+
+    def __iter__(self):
+        return self.data.iterkeys()
+
+    def _iterate_items(self):
+        for k in self.data:
+            try:
+                yield (k, self.data[k])
+            except KeyError:
+                pass
+    iteritems = _iterate_items
+
+    def _iterate_values(self):
+        for k in self.data:
+            try:
+                yield self.data[k]
+            except KeyError:
+                pass
+    itervalues = _iterate_values
 
 
 class TokenBucket(object):

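The new `LRUCache` differs from the `LocalCache` it replaces in that *access* also refreshes a key (`__getitem__` re-inserts it at the most-recently-used end), not just insertion order. A usage sketch:

.. code-block:: python

    from celery.datastructures import LRUCache

    cache = LRUCache(limit=2)
    cache["a"] = 1
    cache["b"] = 2
    cache["a"]          # touch "a": it becomes most recently used
    cache["c"] = 3      # evicts "b", the least recently used key
    assert "b" not in cache and "a" in cache
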
+ 3 - 0
celery/db/a805d4bd.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 a805d4bd
 This module fixes a bug with pickling and relative imports in Python < 2.6.
@@ -19,6 +20,8 @@ Hence the random module name "a805d5bd" is taken to decrease the chances of
 a collision.
 
 """
+from __future__ import absolute_import
+
 from sqlalchemy.types import PickleType as _PickleType
 
 

+ 3 - 0
celery/db/dfd042c7.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 """
 dfd042c7
 
@@ -5,6 +6,8 @@ SQLAlchemy 0.5.8 version of a805d4bd, see the docstring of that module
 for an explanation of this workaround.
 
 """
+from __future__ import absolute_import
+
 from sqlalchemy.types import PickleType as _PickleType
 from sqlalchemy import util
 

+ 1 - 0
celery/db/models.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 from datetime import datetime

+ 3 - 0
celery/db/session.py

@@ -1,3 +1,6 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
 from collections import defaultdict
 
 from sqlalchemy import create_engine

+ 2 - 1
celery/decorators.py

@@ -17,9 +17,10 @@ from __future__ import absolute_import
 import warnings
 
 from . import task as _task
+from .exceptions import CDeprecationWarning
 
 
-warnings.warn(PendingDeprecationWarning("""
+warnings.warn(CDeprecationWarning("""
 The `celery.decorators` module and the magic keyword arguments
 are pending deprecation and will be deprecated in 2.4, then removed
 in 3.0.

+ 13 - 0
celery/events/__init__.py

@@ -1,3 +1,16 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.events
+    ~~~~~~~~~~~~~
+
+    Events are messages sent for actions happening
+    in the worker (and clients if :setting:`CELERY_SEND_TASK_SENT_EVENT`
+    is enabled), used for monitoring purposes.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 from __future__ import with_statement
 

+ 11 - 0
celery/events/cursesmon.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.events.cursesmon
+    ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Graphical monitor of Celery events using curses.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 import curses

+ 15 - 3
celery/events/dumper.py

@@ -1,3 +1,15 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.events.dumper
+    ~~~~~~~~~~~~~~~~~~~~
+
+    This is a simple program that dumps events to the console
+    as they happen.  Think of it as a `tcpdump` for Celery events.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 import sys
@@ -5,10 +17,10 @@ import sys
 from datetime import datetime
 
 from ..app import app_or_default
-from ..datastructures import LocalCache
+from ..datastructures import LRUCache
 
 
-TASK_NAMES = LocalCache(0xFFF)
+TASK_NAMES = LRUCache(limit=0xFFF)
 
 HUMAN_TYPES = {"worker-offline": "shutdown",
                "worker-online": "started",
@@ -30,7 +42,7 @@ class Dumper(object):
         hostname = event.pop("hostname")
         if type.startswith("task-"):
             uuid = event.pop("uuid")
-            if type.startswith("task-received"):
+            if type in ("task-received", "task-sent"):
                 task = TASK_NAMES[uuid] = "%s(%s) args=%s kwargs=%s" % (
                         event.pop("name"), uuid,
                         event.pop("args"),

+ 15 - 0
celery/events/snapshot.py

@@ -1,3 +1,18 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.events.snapshot
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Consuming the events as a stream is not always suitable,
+    so this module implements a system for taking snapshots of
+    the state of a cluster at regular intervals.  A full
+    implementation that writes the snapshots to a database can
+    be found in :mod:`djcelery.snapshots` in the `django-celery`
+    distribution.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 import atexit

+ 38 - 10
celery/events/state.py

@@ -1,3 +1,24 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.events.state
+    ~~~~~~~~~~~~~~~~~~~
+
+    This module implements a data structure used to keep
+    track of the state of a cluster of workers and the tasks
+    they are working on (by consuming events).
+
+    For every event consumed the state is updated,
+    so the state represents the state of the cluster
+    at the time of the last event.
+
+    Snapshots (:mod:`celery.events.snapshot`) can be used to
+    take "pictures" of this state at regular intervals
+    to e.g. store that in a database.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 from __future__ import with_statement
 
@@ -7,7 +28,7 @@ import heapq
 from threading import Lock
 
 from .. import states
-from ..datastructures import AttributeDict, LocalCache
+from ..datastructures import AttributeDict, LRUCache
 from ..utils import kwdict
 
 #: Heartbeat expiry time in seconds.  The worker will be considered offline
@@ -173,8 +194,8 @@ class State(object):
 
     def __init__(self, callback=None,
             max_workers_in_memory=5000, max_tasks_in_memory=10000):
-        self.workers = LocalCache(max_workers_in_memory)
-        self.tasks = LocalCache(max_tasks_in_memory)
+        self.workers = LRUCache(limit=max_workers_in_memory)
+        self.tasks = LRUCache(limit=max_tasks_in_memory)
         self.event_callback = callback
         self.group_handlers = {"worker": self.worker_event,
                                "task": self.task_event}
@@ -195,9 +216,10 @@ class State(object):
 
     def _clear_tasks(self, ready=True):
         if ready:
-            self.tasks = dict((uuid, task)
-                                for uuid, task in self.tasks.items()
-                                    if task.state not in states.READY_STATES)
+            in_progress = dict((uuid, task) for uuid, task in self.itertasks()
+                                if task.state not in states.READY_STATES)
+            self.tasks.clear()
+            self.tasks.update(in_progress)
         else:
             self.tasks.clear()
 
@@ -265,13 +287,19 @@ class State(object):
         if self.event_callback:
             self.event_callback(self, event)
 
+    def itertasks(self, limit=None):
+        for index, row in enumerate(self.tasks.iteritems()):
+            yield row
+            if limit and index + 1 >= limit:
+                break
+
     def tasks_by_timestamp(self, limit=None):
         """Get tasks by timestamp.
 
         Returns a list of `(uuid, task)` tuples.
 
         """
-        return self._sort_tasks_by_time(self.tasks.items()[:limit])
+        return self._sort_tasks_by_time(self.itertasks(limit))
 
     def _sort_tasks_by_time(self, tasks):
         """Sort task items by time."""
@@ -285,7 +313,7 @@ class State(object):
 
         """
         return self._sort_tasks_by_time([(uuid, task)
-                for uuid, task in self.tasks.items()[:limit]
+                for uuid, task in self.itertasks(limit)
                     if task.name == name])
 
     def tasks_by_worker(self, hostname, limit=None):
@@ -295,12 +323,12 @@ class State(object):
 
         """
         return self._sort_tasks_by_time([(uuid, task)
-                for uuid, task in self.tasks.items()[:limit]
+                for uuid, task in self.itertasks(limit)
                     if task.worker.hostname == hostname])
 
     def task_types(self):
         """Returns a list of all seen task types."""
-        return list(sorted(set(task.name for task in self.tasks.values())))
+        return sorted(set(task.name for task in self.tasks.itervalues()))
 
     def alive_workers(self):
         """Returns a list of (seemingly) alive workers."""

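`itertasks` lets the `tasks_by_*` helpers walk the LRU-ordered task cache lazily instead of materializing `items()[:limit]` (slicing is not available on the new cache type anyway). The generator's contract, sketched standalone with a plain dict standing in for the cache:

.. code-block:: python

    def itertasks(tasks, limit=None):
        for index, row in enumerate(tasks.iteritems()):
            yield row
            if limit and index + 1 >= limit:
                break

    tasks = dict(("uuid%d" % i, "task%d" % i) for i in range(5))
    assert len(list(itertasks(tasks, limit=3))) == 3
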
+ 21 - 0
celery/exceptions.py

@@ -1,3 +1,16 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.exceptions
+    ~~~~~~~~~~~~~~~~~
+
+    This module contains Celery-specific exceptions.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
 UNREGISTERED_FMT = """\
 Task of kind %s is not registered, please make sure it's imported.\
 """
@@ -61,3 +74,11 @@ class TaskRevokedError(Exception):
 
 class NotConfigured(UserWarning):
     """Celery has not been configured, as no config module has been found."""
+
+
+class CPendingDeprecationWarning(PendingDeprecationWarning):
+    pass
+
+
+class CDeprecationWarning(DeprecationWarning):
+    pass
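
These celery-specific warning subclasses exist so the command-line tools can force them visible (see the `simplefilter` loop in `celery/bin/base.py` above) without also un-silencing every `DeprecationWarning` in the user's process. Downstream code can likewise target them precisely, for example:

.. code-block:: python

    import warnings

    from celery.exceptions import CDeprecationWarning

    # Opt out of celery's deprecation warnings only; other
    # DeprecationWarnings keep their normal filters.
    warnings.simplefilter("ignore", CDeprecationWarning)
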

+ 1 - 0
celery/execute/__init__.py

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 from .. import current_app

+ 15 - 2
celery/execute/trace.py

@@ -1,3 +1,15 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.execute.trace
+    ~~~~~~~~~~~~~~~~~~~~
+
+    This module defines how the task execution is traced:
+    errors are recorded, handlers are applied and so on.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 import sys
@@ -91,13 +103,14 @@ class TaskTrace(object):
         handler = self._trace_handlers[trace.status]
         r = handler(trace.retval, trace.exc_type, trace.tb, trace.strtb)
         self.handle_after_return(trace.status, trace.retval,
-                                 trace.exc_type, trace.tb, trace.strtb)
+                                 trace.exc_type, trace.tb, trace.strtb,
+                                 einfo=trace.exc_info)
         return r
 
     def handle_after_return(self, status, retval, type_, tb, strtb,
             einfo=None):
         if status in states.EXCEPTION_STATES:
-            einfo = ExceptionInfo((retval, type_, tb))
+            einfo = ExceptionInfo(einfo)
         self.task.after_return(status, retval, self.task_id,
                                self.args, self.kwargs, einfo)
 

+ 12 - 2
celery/loaders/__init__.py

@@ -1,6 +1,16 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
+"""
+    celery.loaders
+    ~~~~~~~~~~~~~~
+
+    Loaders define how configuration is read, what happens
+    when workers start, when tasks are executed and so on.
 
-import os
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
 
 from .. import current_app
 from ..utils import deprecated, get_cls_by_name

+ 11 - 0
celery/loaders/app.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.loaders.app
+    ~~~~~~~~~~~~~~~~~~
+
+    The default loader used with custom app instances.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 from .base import BaseLoader

+ 20 - 1
celery/loaders/base.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.loaders.base
+    ~~~~~~~~~~~~~~~~~~~
+
+    Loader base class.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 import importlib
@@ -11,6 +22,7 @@ from ..datastructures import DictAttribute
 from ..exceptions import ImproperlyConfigured
 from ..utils import (cached_property, get_cls_by_name,
                      import_from_cwd as _import_from_cwd)
+from ..utils.functional import maybe_list
 
 BUILTIN_MODULES = frozenset(["celery.task"])
 
@@ -62,6 +74,10 @@ class BaseLoader(object):
         starts."""
         pass
 
+    def on_worker_process_init(self):
+        """This method is called when a child process starts."""
+        pass
+
     def import_task_module(self, module):
         return self.import_from_cwd(module)
 
@@ -74,7 +90,7 @@ class BaseLoader(object):
                 package=package)
 
     def import_default_modules(self):
-        imports = set(list(self.conf.get("CELERY_IMPORTS") or ()))
+        imports = set(maybe_list(self.conf.get("CELERY_IMPORTS") or ()))
         return [self.import_task_module(module)
                     for module in imports | self.builtin_modules]
 
@@ -83,6 +99,9 @@ class BaseLoader(object):
             self.worker_initialized = True
             self.on_worker_init()
 
+    def init_worker_process(self):
+        self.on_worker_process_init()
+
     def config_from_envvar(self, variable_name, silent=False):
         module_name = os.environ.get(variable_name)
         if not module_name:

+ 11 - 0
celery/loaders/default.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.loaders.default
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    The default loader used when no custom app has been initialized.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 import os

+ 23 - 3
celery/local.py

@@ -1,4 +1,22 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.local
+    ~~~~~~~~~~~~
+
+    This module contains critical utilities that
+    need to be loaded as soon as possible, and that
+    shall not load any third party modules.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
+
 def try_import(module):
+    """Try to import and return module, or return
+    None if the module does not exist."""
     from importlib import import_module
     try:
         return import_module(module)
@@ -6,12 +24,14 @@ def try_import(module):
         pass
 
 
-class LocalProxy(object):
-    """Code stolen from werkzeug.local.LocalProxy."""
+class Proxy(object):
+    """Proxy to another object."""
+
+    # Code stolen from werkzeug.local.LocalProxy.
     __slots__ = ('__local', '__dict__', '__name__')
 
     def __init__(self, local, name=None):
-        object.__setattr__(self, '_LocalProxy__local', local)
+        object.__setattr__(self, '_Proxy__local', local)
         object.__setattr__(self, '__name__', name)
 
     def _get_current_object(self):

+ 28 - 23
celery/log.py

@@ -1,4 +1,4 @@
-"""celery.log"""
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 import logging
@@ -14,12 +14,15 @@ except ImportError:
 
 from . import current_app
 from . import signals
+from .local import Proxy
 from .utils import LOG_LEVELS, isatty
 from .utils.compat import LoggerAdapter, WatchedFileHandler
-from .utils.encoding import safe_str
+from .utils.encoding import safe_str, str_t
 from .utils.patch import ensure_process_aware_logger
 from .utils.term import colored
 
+is_py3k = sys.version_info >= (3, 0)
+
 
 class ColorFormatter(logging.Formatter):
     #: Loglevel -> Color mapping.
@@ -33,8 +36,8 @@ class ColorFormatter(logging.Formatter):
 
     def formatException(self, ei):
         r = logging.Formatter.formatException(self, ei)
-        if isinstance(r, str):
-            return r.decode("utf-8", "replace")    # Convert to unicode
+        if isinstance(r, str) and not is_py3k:
+            return safe_str(r)
         return r
 
     def format(self, record):
@@ -43,22 +46,21 @@ class ColorFormatter(logging.Formatter):
 
         if self.use_color and color:
             try:
-                record.msg = str(color(safe_str(record.msg)))
+                record.msg = safe_str(str_t(color(record.msg)))
             except Exception, exc:
                 record.msg = "<Unrepresentable %r: %r>" % (
                         type(record.msg), exc)
                 record.exc_info = sys.exc_info()
 
-        # Very ugly, but have to make sure processName is supported
-        # by foreign logger instances.
-        # (processName is always supported by Python 2.7)
-        if "processName" not in record.__dict__:
-            process_name = current_process and current_process()._name or ""
-            record.__dict__["processName"] = process_name
-        t = logging.Formatter.format(self, record)
-        if isinstance(t, unicode):
-            return t.encode("utf-8", "replace")
-        return t
+        if not is_py3k:
+            # Very ugly, but have to make sure processName is supported
+            # by foreign logger instances.
+            # (processName is always supported by Python 2.7)
+            if "processName" not in record.__dict__:
+                process_name = (current_process and
+                                current_process()._name or "")
+                record.__dict__["processName"] = process_name
+        return safe_str(logging.Formatter.format(self, record))
 
 
 class Logging(object):
@@ -104,7 +106,8 @@ class Logging(object):
 
         if mputil and hasattr(mputil, "_logger"):
             mputil._logger = None
-        ensure_process_aware_logger()
+        if not is_py3k:
+            ensure_process_aware_logger()
         receivers = signals.setup_logging.send(sender=None,
                         loglevel=loglevel, logfile=logfile,
                         format=format, colorize=colorize)
@@ -219,12 +222,14 @@ class Logging(object):
         return logger
 
 
-setup_logging_subsystem = current_app.log.setup_logging_subsystem
-get_default_logger = current_app.log.get_default_logger
-setup_logger = current_app.log.setup_logger
-setup_task_logger = current_app.log.setup_task_logger
-get_task_logger = current_app.log.get_task_logger
-redirect_stdouts_to_logger = current_app.log.redirect_stdouts_to_logger
+get_default_logger = Proxy(lambda: current_app.log.get_default_logger)
+setup_logger = Proxy(lambda: current_app.log.setup_logger)
+setup_task_logger = Proxy(lambda: current_app.log.setup_task_logger)
+get_task_logger = Proxy(lambda: current_app.log.get_task_logger)
+setup_logging_subsystem = Proxy(
+            lambda: current_app.log.setup_logging_subsystem)
+redirect_stdouts_to_logger = Proxy(
+            lambda: current_app.log.redirect_stdouts_to_logger)
 
 
 class LoggingProxy(object):
@@ -282,7 +287,7 @@ class LoggingProxy(object):
         if data and not self.closed:
             self._thread.recurse_protection = True
             try:
-                self.logger.log(self.loglevel, data)
+                self.logger.log(self.loglevel, safe_str(data))
             finally:
                 self._thread.recurse_protection = False
 

+ 11 - 6
celery/messaging.py

@@ -1,8 +1,13 @@
+# -*- coding: utf-8 -*-
+"""This module is deprecated, use ``current_app.amqp`` instead."""
+from __future__ import absolute_import
+
 from . import current_app
+from .local import Proxy
 
-TaskPublisher = current_app.amqp.TaskPublisher
-ConsumerSet = current_app.amqp.ConsumerSet
-TaskConsumer = current_app.amqp.TaskConsumer
-establish_connection = current_app.broker_connection
-with_connection = current_app.with_default_connection
-get_consumer_set = current_app.amqp.get_task_consumer
+TaskPublisher = Proxy(lambda: current_app.amqp.TaskPublisher)
+ConsumerSet = Proxy(lambda: current_app.amqp.ConsumerSet)
+TaskConsumer = Proxy(lambda: current_app.amqp.TaskConsumer)
+establish_connection = Proxy(lambda: current_app.broker_connection)
+with_connection = Proxy(lambda: current_app.with_default_connection)
+get_consumer_set = Proxy(lambda: current_app.amqp.get_task_consumer)
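
Binding these names through `Proxy` instead of plain assignment defers attribute access to use time: importing `celery.messaging` no longer freezes whatever `current_app` happened to be at import time. A small sketch of the difference, assuming `Proxy` forwards operations to the object returned by its callable:

.. code-block:: python

    from celery.local import Proxy

    class Registry(object):
        current = "default"

    eager = Registry.current                 # frozen at import time
    lazy = Proxy(lambda: Registry.current)   # evaluated on each access

    Registry.current = "configured"
    assert eager == "default"
    assert str(lazy) == "configured"
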

+ 180 - 26
celery/platforms.py

@@ -1,9 +1,23 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.platforms
+    ~~~~~~~~~~~~~~~~
+
+    Utilities dealing with platform specifics: signals, daemonization,
+    users, groups, and so on.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
-import os
-import sys
 import errno
+import os
+import platform as _platform
+import shlex
 import signal as _signal
+import sys
 
 from .local import try_import
 
@@ -12,16 +26,42 @@ resource = try_import("resource")
 pwd = try_import("pwd")
 grp = try_import("grp")
 
+SYSTEM = _platform.system()
+IS_OSX = SYSTEM == "Darwin"
+IS_WINDOWS = SYSTEM == "Windows"
+
 DAEMON_UMASK = 0
 DAEMON_WORKDIR = "/"
 DAEMON_REDIRECT_TO = getattr(os, "devnull", "/dev/null")
 
 
+def pyimplementation():
+    if hasattr(_platform, "python_implementation"):
+        return _platform.python_implementation()
+    elif sys.platform.startswith("java"):
+        return "Jython %s" % (sys.platform, )
+    elif hasattr(sys, "pypy_version_info"):
+        v = ".".join(map(str, sys.pypy_version_info[:3]))
+        if sys.pypy_version_info[3:]:
+            v += "-" + "".join(map(str, sys.pypy_version_info[3:]))
+        return "PyPy %s" % (v, )
+    else:
+        return "CPython"
+
+
 class LockFailed(Exception):
+    """Raised if a pidlock can't be acquired."""
     pass
 
 
 def get_fdmax(default=None):
+    """Returns the maximum number of open file descriptors
+    on this system.
+
+    :keyword default: Value returned if there's no file
+                      descriptor limit.
+
+    """
     fdmax = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
     if fdmax == resource.RLIM_INFINITY:
         return default
@@ -29,22 +69,23 @@ def get_fdmax(default=None):
 
 
 class PIDFile(object):
+    """PID lock file.
+
+    This is the type returned by :func:`create_pidlock`.
+
+    **Should not be used directly, use the :func:`create_pidlock`
+    context instead**
+
+    """
+
+    #: Path to the pid lock file.
+    path = None
 
     def __init__(self, path):
         self.path = os.path.abspath(path)
 
-    def write_pid(self):
-        open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
-        open_mode = (((os.R_OK | os.W_OK) << 6) |
-                        ((os.R_OK) << 3) |
-                        ((os.R_OK)))
-        pidfile_fd = os.open(self.path, open_flags, open_mode)
-        pidfile = os.fdopen(pidfile_fd, "w")
-        pid = os.getpid()
-        pidfile.write("%d\n" % (pid, ))
-        pidfile.close()
-
     def acquire(self):
+        """Acquire lock."""
         try:
             self.write_pid()
         except OSError, exc:
@@ -53,13 +94,16 @@ class PIDFile(object):
     __enter__ = acquire
 
     def is_locked(self):
+        """Returns true if the pid lock exists."""
         return os.path.exists(self.path)
 
     def release(self, *args):
+        """Release lock."""
         self.remove()
     __exit__ = release
 
     def read_pid(self):
+        """Reads and returns the current pid."""
         try:
             fh = open(self.path, "r")
         except IOError, exc:
@@ -76,6 +120,7 @@ class PIDFile(object):
             raise ValueError("PID file %r contents invalid." % self.path)
 
     def remove(self):
+        """Removes the lock."""
         try:
             os.unlink(self.path)
         except OSError, exc:
@@ -84,6 +129,8 @@ class PIDFile(object):
             raise
 
     def remove_if_stale(self):
+        """Removes the lock if the process is not running.
+        (does not respond to signals)."""
         try:
             pid = self.read_pid()
         except ValueError, exc:
@@ -103,13 +150,39 @@ class PIDFile(object):
                 return True
         return False
 
+    def write_pid(self):
+        open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+        open_mode = (((os.R_OK | os.W_OK) << 6) |
+                        ((os.R_OK) << 3) |
+                        ((os.R_OK)))
+        pidfile_fd = os.open(self.path, open_flags, open_mode)
+        pidfile = os.fdopen(pidfile_fd, "w")
+        try:
+            pid = os.getpid()
+            pidfile.write("%d\n" % (pid, ))
+        finally:
+            pidfile.close()
+
 
 def create_pidlock(pidfile):
-    """Create and verify pidfile.
+    """Create and verify pid file.
+
+    If the pid file already exists the program exits with an error message,
+    however if the process it refers to is not running anymore, the pid file
+    is deleted and the program continues.
+
+    The caller is responsible for releasing the lock before the program
+    exits.
 
-    If the pidfile already exists the program exits with an error message,
-    however if the process it refers to is not running anymore, the pidfile
-    is just deleted.
+    :returns: :class:`PIDFile`.
+
+    **Example**:
+
+    .. code-block:: python
+
+        import atexit
+        pidlock = create_pidlock("/var/run/app.pid").acquire()
+        atexit.register(pidlock.release)
 
     """
 
@@ -124,11 +197,13 @@ def create_pidlock(pidfile):
 
 class DaemonContext(object):
     _is_open = False
+    workdir = DAEMON_WORKDIR
+    umask = DAEMON_UMASK
 
-    def __init__(self, pidfile=None, workdir=DAEMON_WORKDIR,
-            umask=DAEMON_UMASK, **kwargs):
-        self.workdir = workdir
-        self.umask = umask
+    def __init__(self, pidfile=None, workdir=None,
+            umask=None, **kwargs):
+        self.workdir = workdir or self.workdir
+        self.umask = self.umask if umask is None else umask
 
     def open(self):
         if not self._is_open:
@@ -168,6 +243,41 @@ class DaemonContext(object):
 
 def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
              workdir=None, **opts):
+    """Detach the current process in the background (daemonize).
+
+    :keyword logfile: Optional log file.  The ability to write to this file
+       will be verified before the process is detached.
+    :keyword pidfile: Optional pid file.  The pid file will not be created,
+      as this is the responsibility of the child.  But the process will
+      exit if the pid lock exists and the pid written is still running.
+    :keyword uid: Optional user id or user name to change
+      effective privileges to.
+    :keyword gid: Optional group id or group name to change effective
+      privileges to.
+    :keyword umask: Optional umask that will be effective in the child process.
+    :keyword workdir: Optional new working directory.
+    :keyword \*\*opts: Ignored.
+
+    **Example**:
+
+    .. code-block:: python
+
+        import atexit
+        from celery.platforms import detached, create_pidlock
+
+        with detached(logfile="/var/log/app.log", pidfile="/var/run/app.pid",
+                      uid="nobody"):
+            # Now in detached child process with effective user set to nobody,
+            # and we know that our logfile can be written to, and that
+            # the pidfile is not locked.
+            pidlock = create_pidlock("/var/run/app.pid").acquire()
+            atexit.register(pidlock.release)
+
+            # Run the program
+            program.run(logfile="/var/log/app.log")
+
+    """
+
     if not resource:
         raise RuntimeError("This platform does not support detach.")
     workdir = os.getcwd() if workdir is None else workdir
@@ -187,7 +297,7 @@ def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
 def parse_uid(uid):
     """Parse user id.
 
-    uid can be an interger (uid) or a string (username), if a username
+    uid can be an integer (uid) or a string (user name); if a user name,
     the uid is taken from the password file.
 
     """
@@ -237,19 +347,19 @@ def seteuid(uid):
 def set_effective_user(uid=None, gid=None):
     """Change process privileges to new user/group.
 
-    If uid and gid is set the effective user/group is set.
+    If both UID and GID are set, the effective user/group is set.
 
-    If only uid is set, the effective uer is set, and the group is
+    If only UID is set, the effective user is set, and the group is
     set to the user's primary group.
 
-    If only gid is set, the effective group is set.
+    If only GID is set, the effective group is set.
 
     """
     uid = uid and parse_uid(uid)
     gid = gid and parse_gid(gid)
 
     if uid:
-        # If gid isn't defined, get the primary gid of the uer.
+        # If GID isn't defined, get the primary GID of the user.
         if not gid and pwd:
             gid = pwd.getpwuid(uid).pw_gid
         setegid(gid)
@@ -259,6 +369,42 @@ def set_effective_user(uid=None, gid=None):
 
 
 class Signals(object):
+    """Convenience interface to :mod:`signals`.
+
+    If the requested signal is not supported on the current platform,
+    the operation will be ignored.
+
+    **Examples**:
+
+    .. code-block:: python
+
+        >>> from celery.platforms import signals
+
+        >>> signals["INT"] = my_handler
+
+        >>> signals["INT"]
+        my_handler
+
+        >>> signals.supported("INT")
+        True
+
+        >>> signals.signum("INT")
+        2
+
+        >>> signals.ignore("USR1")
+        >>> signals["USR1"] == signals.ignored
+        True
+
+        >>> signals.reset("USR1")
+        >>> signals["USR1"] == signals.default
+        True
+
+        >>> signals.update(INT=exit_handler,
+        ...                TERM=exit_handler,
+        ...                HUP=hup_handler)
+
+    """
+
     ignored = _signal.SIG_IGN
     default = _signal.SIG_DFL
 
@@ -361,3 +507,11 @@ def set_mp_process_title(progname, info=None, hostname=None):
     else:
         return set_process_title("%s:%s" % (progname,
                                             current_process().name), info=info)
+
+
+def shellsplit(s, posix=None):
+    # The posix= option to shlex.split is first available in Python 2.6+,
+    # so it is emulated here.  Defaults to non-POSIX mode on Windows.
+    if posix is None:
+        posix = not IS_WINDOWS
+    lexer = shlex.shlex(s, posix=posix)
+    lexer.whitespace_split = True
+    lexer.commenters = ''
+    return list(lexer)
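
A quick sketch of the intended behaviour (the command line is hypothetical;
in POSIX mode quoting is honoured, and because ``commenters`` is disabled
a ``#`` is treated as a regular token):

.. code-block:: python

    >>> from celery.platforms import shellsplit
    >>> shellsplit("celeryd --logfile='/var/log/app log.txt' -l INFO")
    ['celeryd', '--logfile=/var/log/app log.txt', '-l', 'INFO']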

+ 11 - 1
celery/registry.py

@@ -1,4 +1,14 @@
-"""celery.registry"""
+# -*- coding: utf-8 -*-
+"""
+    celery.registry
+    ~~~~~~~~~~~~~~~
+
+    Registry of available tasks.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 import inspect

+ 21 - 22
celery/result.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.result
+    ~~~~~~~~~~~~~
+
+    Task results/state and groups of results.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 from __future__ import with_statement
 
@@ -18,11 +29,11 @@ def _unpickle_result(task_id, task_name):
     return _unpickle_task(task_name).AsyncResult(task_id)
 
 
-class BaseAsyncResult(object):
-    """Base class for pending result, supports custom task result backend.
+class AsyncResult(object):
+    """Query task state.
 
     :param task_id: see :attr:`task_id`.
-    :param backend: see :attr:`backend`.
+    :keyword backend: see :attr:`backend`.
 
     """
 
@@ -35,10 +46,10 @@ class BaseAsyncResult(object):
     #: The task result backend to use.
     backend = None
 
-    def __init__(self, task_id, backend, task_name=None, app=None):
+    def __init__(self, task_id, backend=None, task_name=None, app=None):
         self.app = app_or_default(app)
         self.task_id = task_id
-        self.backend = backend
+        self.backend = backend or self.app.backend
         self.task_name = task_name
 
     def forget(self):
@@ -183,23 +194,7 @@ class BaseAsyncResult(object):
     def status(self):
         """Deprecated alias of :attr:`state`."""
         return self.state
-
-
-class AsyncResult(BaseAsyncResult):
-    """Pending task result using the default backend.
-
-    :param task_id: The task uuid.
-
-    """
-
-    #: Task result store backend to use.
-    backend = None
-
-    def __init__(self, task_id, backend=None, task_name=None, app=None):
-        app = app_or_default(app)
-        backend = backend or app.backend
-        super(AsyncResult, self).__init__(task_id, backend,
-                                          task_name=task_name, app=app)
+BaseAsyncResult = AsyncResult  # for backwards compatibility.
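
A short usage sketch of the merged class (the task id here is a made-up
placeholder): when no ``backend`` argument is passed, the result now falls
back to the current app's configured backend.

.. code-block:: python

    >>> from celery.result import AsyncResult
    >>> res = AsyncResult("hypothetical-task-id")   # no explicit backend
    >>> res.backend is res.app.backend
    True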
 
 
 class ResultSet(object):
@@ -440,6 +435,10 @@ class ResultSet(object):
         """Deprecated alias to :attr:`results`."""
         return self.results
 
+    @property
+    def supports_native_join(self):
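+        # Note: assumes all results in the set share the same backend.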
+        return self.results[0].backend.supports_native_join
+
 
 class TaskSetResult(ResultSet):
     """An instance of this class is returned by

+ 12 - 0
celery/routes.py

@@ -1,3 +1,15 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.routes
+    ~~~~~~~~~~~~~
+
+    Contains utilities for working with task routes
+    (:setting:`CELERY_ROUTES`).
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 from .exceptions import QueueNotFound

+ 74 - 59
celery/schedules.py

@@ -1,15 +1,31 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.schedules
+    ~~~~~~~~~~~~~~~~
+
+    Schedules define the intervals at which periodic tasks
+    should run.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
+import re
+
 from datetime import datetime, timedelta
 from dateutil.relativedelta import relativedelta
-from pyparsing import (Word, Literal, ZeroOrMore, Optional,
-                       Group, StringEnd, alphas)
 
 from .utils import is_iterable
 from .utils.timeutils import (timedelta_seconds, weekday,
                               remaining, humanize_seconds)
 
 
+class ParseException(Exception):
+    """Raised by crontab_parser when the input can't be parsed."""
+
+
 class schedule(object):
     relative = False
 
@@ -50,8 +66,8 @@ class schedule(object):
         return False, rem
 
     def __repr__(self):
-        return "<freq: %s>" % humanize_seconds(
-                timedelta_seconds(self.run_every))
+        return "<freq: %s>" % (
+                    humanize_seconds(timedelta_seconds(self.run_every)), )
 
     def __eq__(self, other):
         if isinstance(other, schedule):
@@ -85,70 +101,69 @@ class crontab_parser(object):
         [0, 1, 2, 3, 4, 5, 6]
 
     """
+    ParseException = ParseException
 
-    def __init__(self, max_=60):
-        # define the grammar structure
-        digits = "0123456789"
-        star = Literal('*')
-        number = Word(digits) | Word(alphas)
-        steps = number
-        range_ = number + Optional(Literal('-') + number)
-        numspec = star | range_
-        expr = Group(numspec) + Optional(Literal('/') + steps)
-        extra_groups = ZeroOrMore(Literal(',') + expr)
-        groups = expr + extra_groups + StringEnd()
-
-        # define parse actions
-        star.setParseAction(self._expand_star)
-        number.setParseAction(self._expand_number)
-        range_.setParseAction(self._expand_range)
-        expr.setParseAction(self._filter_steps)
-        extra_groups.setParseAction(self._ignore_comma)
-        groups.setParseAction(self._join_to_set)
+    _range = r'(\w+?)-(\w+)'
+    _steps = r'/(\w+)?'
+    _star = r'\*'
 
+    def __init__(self, max_=60):
         self.max_ = max_
-        self.parser = groups
-
-    @staticmethod
-    def _expand_number(toks):
-        try:
-            i = int(toks[0])
-        except ValueError:
-            try:
-                i = weekday(toks[0])
-            except KeyError:
-                raise ValueError("Invalid weekday literal '%s'." % toks[0])
-        return [i]
-
-    @staticmethod
-    def _expand_range(toks):
+        self.pats = (
+                (re.compile(self._range + self._steps), self._range_steps),
+                (re.compile(self._range), self._expand_range),
+                (re.compile(self._star + self._steps), self._star_steps),
+                (re.compile('^' + self._star + '$'), self._expand_star))
+
+    def parse(self, spec):
+        acc = set()
+        for part in spec.split(','):
+            if not part:
+                raise self.ParseException("empty part")
+            acc |= set(self._parse_part(part))
+        return acc
+
+    def _parse_part(self, part):
+        for regex, handler in self.pats:
+            m = regex.match(part)
+            if m:
+                return handler(m.groups())
+        return self._expand_range((part, ))
+
+    def _expand_range(self, toks):
+        fr = self._expand_number(toks[0])
         if len(toks) > 1:
-            return range(toks[0], int(toks[2]) + 1)
-        else:
-            return toks[0]
+            to = self._expand_number(toks[1])
+            return range(fr, min(to + 1, self.max_ + 1))
+        return [fr]
 
-    def _expand_star(self, toks):
-        return range(self.max_)
+    def _range_steps(self, toks):
+        if len(toks) != 3 or not toks[2]:
+            raise self.ParseException("empty filter")
+        return self._filter_steps(self._expand_range(toks[:2]), int(toks[2]))
 
-    @staticmethod
-    def _filter_steps(toks):
-        numbers = toks[0]
-        if len(toks) > 1:
-            steps = toks[2]
-            return [n for n in numbers if n % steps == 0]
-        else:
-            return numbers
+    def _star_steps(self, toks):
+        if not toks or not toks[0]:
+            raise self.ParseException("empty filter")
+        return self._filter_steps(self._expand_star(), int(toks[0]))
 
-    @staticmethod
-    def _ignore_comma(toks):
-        return [x for x in toks if x != ',']
+    def _filter_steps(self, numbers, steps):
+        return [n for n in numbers if n % steps == 0]
 
-    @staticmethod
-    def _join_to_set(toks):
-        return set(toks.asList())
+    def _expand_star(self, *args):
+        return range(self.max_)
 
-    def parse(self, cronspec):
-        return self.parser.parseString(cronspec).pop()
+    def _expand_number(self, s):
+        if isinstance(s, basestring) and s[0] == '-':
+            raise self.ParseException("negative numbers not supported")
+        try:
+            i = int(s)
+        except ValueError:
+            try:
+                i = weekday(s)
+            except KeyError:
+                raise ValueError("Invalid weekday literal '%s'." % s)
+        return i
 
 
 class crontab(schedule):
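
The regex-based parser accepts the same expressions as the old pyparsing
grammar.  A few illustrative calls (weekday names assume the sun=0 mapping
used by ``celery.utils.timeutils.weekday``):

.. code-block:: python

    >>> from celery.schedules import crontab_parser
    >>> crontab_parser(60).parse("*/15")        # minutes
    set([0, 15, 30, 45])
    >>> crontab_parser(7).parse("mon-fri")      # days of week
    set([1, 2, 3, 4, 5])
    >>> crontab_parser(24).parse("1,4,7")       # explicit list
    set([1, 4, 7])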

+ 8 - 349
celery/signals.py

@@ -1,366 +1,25 @@
-from __future__ import absolute_import
+# -*- coding: utf-8 -*-
 """
+    celery.signals
+    ~~~~~~~~~~~~~~
 
-==============
-celery.signals
-==============
-
-Signals allows decoupled applications to receive notifications when
-certain actions occur elsewhere in the application.
-
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
-
-.. contents::
-    :local:
-
-.. _signal-basics:
-
-Basics
-======
-
-Several kinds of events trigger signals, you can connect to these signals
-to perform actions as they trigger.
-
-Example connecting to the :signal:`task_sent` signal:
-
-.. code-block:: python
-
-    from celery.signals import task_sent
-
-    def task_sent_handler(sender=None, task_id=None, task=None, args=None,
-                          kwargs=None, **kwds):
-        print("Got signal task_sent for task id %s" % (task_id, ))
-
-    task_sent.connect(task_sent_handler)
-
-
-Some signals also have a sender which you can filter by. For example the
-:signal:`task_sent` signal uses the task name as a sender, so you can
-connect your handler to be called only when tasks with name `"tasks.add"`
-has been sent by providing the `sender` argument to
-:class:`~celery.utils.dispatch.signal.Signal.connect`:
-
-.. code-block:: python
-
-    task_sent.connect(task_sent_handler, sender="tasks.add")
-
-.. _signal-ref:
-
-Signals
-=======
-
-Task Signals
-------------
-
-.. signal:: task_sent
-
-task_sent
-~~~~~~~~~
-
-Dispatched when a task has been sent to the broker.
-Note that this is executed in the client process, the one sending
-the task, not in the worker.
-
-Sender is the name of the task being sent.
-
-Provides arguments:
-
-* task_id
-    Id of the task to be executed.
-
-* task
-    The task being executed.
-
-* args
-    the tasks positional arguments.
-
-* kwargs
-    The tasks keyword arguments.
-
-* eta
-    The time to execute the task.
-
-* taskset
-    Id of the taskset this task is part of (if any).
-
-.. signal:: task_prerun
-
-task_prerun
-~~~~~~~~~~~
-
-Dispatched before a task is executed.
-
-Sender is the task class being executed.
-
-Provides arguments:
-
-* task_id
-    Id of the task to be executed.
-
-* task
-    The task being executed.
-
-* args
-    the tasks positional arguments.
-
-* kwargs
-    The tasks keyword arguments.
-
-.. signal:: task_postrun
-
-task_postrun
-~~~~~~~~~~~~
-
-Dispatched after a task has been executed.
-
-Sender is the task class executed.
-
-Provides arguments:
-
-* task_id
-    Id of the task to be executed.
-
-* task
-    The task being executed.
-
-* args
-    The tasks positional arguments.
-
-* kwargs
-    The tasks keyword arguments.
-
-* retval
-    The return value of the task.
-
-.. signal:: task_failure
-
-task_failure
-~~~~~~~~~~~~
-
-Dispatched when a task fails.
-
-Sender is the task class executed.
-
-Provides arguments:
-
-* task_id
-    Id of the task.
-
-* exception
-    Exception instance raised.
-
-* args
-    Positional arguments the task was called with.
-
-* kwargs
-    Keyword arguments the task was called with.
-
-* traceback
-    Stack trace object.
-
-* einfo
-    The :class:`celery.datastructures.ExceptionInfo` instance.
-
-Worker Signals
---------------
-
-.. signal:: worker_init
-
-worker_init
-~~~~~~~~~~~
-
-Dispatched before the worker is started.
-
-.. signal:: worker_ready
-
-worker_ready
-~~~~~~~~~~~~
-
-Dispatched when the worker is ready to accept work.
-
-.. signal:: worker_process_init
-
-worker_process_init
-~~~~~~~~~~~~~~~~~~~
-
-Dispatched by each new pool worker process when it starts.
-
-.. signal:: worker_shutdown
-
-worker_shutdown
-~~~~~~~~~~~~~~~
-
-Dispatched when the worker is about to shut down.
-
-Celerybeat Signals
-------------------
-
-.. signal:: beat_init
-
-beat_init
-~~~~~~~~~
-
-Dispatched when celerybeat starts (either standalone or embedded).
-Sender is the :class:`celery.beat.Service` instance.
-
-.. signal:: beat_embedded_init
-
-beat_embedded_init
-~~~~~~~~~~~~~~~~~~
-
-Dispatched in addition to the :signal:`beat_init` signal when celerybeat is
-started as an embedded process.  Sender is the
-:class:`celery.beat.Service` instance.
-
-Eventlet Signals
-----------------
-
-.. signal:: eventlet_pool_started
-
-eventlet_pool_started
-~~~~~~~~~~~~~~~~~~~~~
-
-Sent when the eventlet pool has been started.
-
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
-
-.. signal:: eventlet_pool_preshutdown
-
-eventlet_pool_preshutdown
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Sent when the worker shutdown, just before the eventlet pool
-is requested to wait for remaining workers.
-
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
-
-.. signal:: eventlet_pool_postshutdown
-
-eventlet_pool_postshutdown
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Sent when the pool has been joined and the worker is ready to shutdown.
-
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
-
-.. signal:: eventlet_pool_apply
-
-eventlet_pool_apply
-~~~~~~~~~~~~~~~~~~~
-
-Sent whenever a task is applied to the pool.
-
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
-
-Provides arguments:
-
-* target
-
-    The target function.
-
-* args
-
-    Positional arguments.
-
-* kwargs
-
-    Keyword arguments.
-
-Logging Signals
----------------
-
-.. signal:: setup_logging
-
-setup_logging
-~~~~~~~~~~~~~
-
-Celery won't configure the loggers if this signal is connected,
-so you can use this to completely override the logging configuration
-with your own.
-
-If you would like to augment the logging configuration setup by
-Celery then you can use the :signal:`after_setup_logger` and
-:signal:`after_setup_task_logger` signals.
-
-Provides arguments:
-
-* loglevel
-    The level of the logging object.
-
-* logfile
-    The name of the logfile.
-
-* format
-    The log format string.
-
-* colorize
-    Specify if log messages are colored or not.
-
-.. signal:: after_setup_logger
-
-after_setup_logger
-~~~~~~~~~~~~~~~~~~
-
-Sent after the setup of every global logger (not task loggers).
-Used to augment logging configuration.
-
-Provides arguments:
-
-* logger
-    The logger object.
-
-* loglevel
-    The level of the logging object.
-
-* logfile
-    The name of the logfile.
-
-* format
-    The log format string.
-
-* colorize
-    Specify if log messages are colored or not.
-
-.. signal:: after_setup_task_logger
-
-after_setup_task_logger
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Sent after the setup of every single task logger.
-Used to augment logging configuration.
-
-Provides arguments:
-
-* logger
-    The logger object.
-
-* loglevel
-    The level of the logging object.
-
-* logfile
-    The name of the logfile.
-
-* format
-    The log format string.
-
-* colorize
-    Specify if log messages are colored or not.
+    See :ref:`signals`.
 
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
 
 """
+from __future__ import absolute_import
+
 from .utils.dispatch import Signal
 
 task_sent = Signal(providing_args=["task_id", "task",
                                    "args", "kwargs",
                                    "eta", "taskset"])
-
 task_prerun = Signal(providing_args=["task_id", "task",
                                      "args", "kwargs"])
-
 task_postrun = Signal(providing_args=["task_id", "task",
                                       "args", "kwargs", "retval"])
-
 task_failure = Signal(providing_args=["task_id", "exception",
                                       "args", "kwargs", "traceback",
                                       "einfo"])

+ 2 - 5
celery/states.py

@@ -1,13 +1,10 @@
+# -*- coding: utf-8 -*-
 """
 celery.states
 =============
 
 Built-in Task States.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
-
-
 .. _states:
 
 States
@@ -15,7 +12,6 @@ States
 
 See :ref:`task-states`.
 
-
 Sets
 ----
 
@@ -59,6 +55,7 @@ Misc.
 -----
 
 """
+from __future__ import absolute_import
 
 #: State precedence.
 #: None represents the precedence of an unknown state.

+ 16 - 8
celery/task/__init__.py

@@ -1,17 +1,25 @@
 # -*- coding: utf-8 -*-
+"""
+    celery.task
+    ~~~~~~~~~~~
+
+    Creating tasks, subtasks, sets and chords.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 import warnings
 
 from ..app import app_or_default
+from ..exceptions import CDeprecationWarning
 
-from .base import Task, PeriodicTask
-from .sets import TaskSet, subtask
-from .chords import chord
-from .control import discard_all
-
-__all__ = ["Task", "TaskSet", "PeriodicTask", "subtask",
-           "discard_all", "chord"]
+from .base import Task, PeriodicTask  # noqa
+from .sets import TaskSet, subtask    # noqa
+from .chords import chord             # noqa
+from .control import discard_all      # noqa
 
 
 def task(*args, **kwargs):
@@ -98,7 +106,7 @@ def ping():  # ✞
     Please use :meth:`celery.task.control.ping` instead.
 
     """
-    warnings.warn(DeprecationWarning(
+    warnings.warn(CDeprecationWarning(
         "The ping task has been deprecated and will be removed in Celery "
         "v2.3.  Please use inspect.ping instead."))
     return PingTask.apply_async().get()

+ 11 - 0
celery/task/base.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.task.base
+    ~~~~~~~~~~~~~~~~
+
+    The task implementation has been moved to :mod:`celery.app.task`.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 from .. import current_app

+ 20 - 8
celery/task/chords.py

@@ -1,7 +1,18 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.task.chords
+    ~~~~~~~~~~~~~~~~~~
+
+    Chords (task set callbacks).
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
 from .. import current_app
-from ..result import TaskSetResult
+from ..result import AsyncResult, TaskSetResult
 from ..utils import uuid
 
 from .sets import TaskSet, subtask
@@ -9,11 +20,11 @@ from .sets import TaskSet, subtask
 
 @current_app.task(name="celery.chord_unlock", max_retries=None)
 def _unlock_chord(setid, callback, interval=1, propagate=False,
-        max_retries=None):
-    result = TaskSetResult.restore(setid)
+        max_retries=None, result=None):
+    result = TaskSetResult(setid, map(AsyncResult, result))
     if result.ready():
-        subtask(callback).delay(result.join(propagate=propagate))
-        result.delete()
+        j = result.join_native if result.supports_native_join else result.join
+        subtask(callback).delay(j(propagate=propagate))
     else:
         _unlock_chord.retry(countdown=interval, max_retries=max_retries)
 
@@ -32,10 +43,11 @@ class Chord(current_app.Task):
             tid = uuid()
             task.options.update(task_id=tid, chord=body)
             r.append(current_app.AsyncResult(tid))
-        current_app.TaskSetResult(setid, r).save()
-        self.backend.on_chord_apply(setid, body, interval,
+        self.backend.on_chord_apply(setid, body,
+                                    interval=interval,
                                     max_retries=max_retries,
-                                    propagate=propagate)
+                                    propagate=propagate,
+                                    result=r)
         return set.apply_async(taskset_id=setid)
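
The user-facing API is unchanged by this; a minimal sketch, assuming
hypothetical ``add`` and ``tsum`` tasks:

.. code-block:: python

    >>> from celery.task.chords import chord
    >>> from tasks import add, tsum             # hypothetical tasks
    >>> res = chord(add.subtask((i, i)) for i in xrange(10))(tsum.subtask())
    >>> res.get()
    90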
 
 

+ 16 - 4
celery/task/control.py

@@ -1,3 +1,15 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.task.control
+    ~~~~~~~~~~~~~~~~~~~
+
+    Client for worker remote control commands.
+    Server implementation is in :mod:`celery.worker.control`.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 from __future__ import with_statement
 
@@ -52,7 +64,7 @@ class Inspect(object):
     def revoked(self):
         return self._request("dump_revoked")
 
-    def registered_tasks(self):
+    def registered(self):
         return self._request("dump_tasks")
 
     def enable_events(self):
@@ -76,6 +88,8 @@ class Inspect(object):
     def active_queues(self):
         return self._request("active_queues")
 
+    registered_tasks = registered
+
 
 class Control(object):
     Mailbox = Mailbox
@@ -209,9 +223,7 @@ class Control(object):
         """
         with self.app.default_connection(connection, connect_timeout) as conn:
             if channel is None:
-                if not getattr(conn, "_producer_chan", None):
-                    conn._producer_chan = conn.channel()
-                channel = conn._producer_chan
+                channel = conn.default_channel
             return self.mailbox(conn)._broadcast(command, arguments,
                                                  destination, reply, timeout,
                                                  limit, callback,
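
``registered_tasks`` is kept as an alias, so existing code keeps working:

.. code-block:: python

    >>> from celery.task.control import inspect
    >>> i = inspect()
    >>> i.registered()          # new name
    >>> i.registered_tasks()    # deprecated alias, same request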

+ 25 - 5
celery/task/http.py

@@ -1,5 +1,17 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.task.http
+    ~~~~~~~~~~~~~~~~
+
+    Task webhooks implementation.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 
+import sys
 import urllib2
 
 from urllib import urlencode
@@ -36,11 +48,19 @@ def maybe_utf8(value):
     return value
 
 
-def utf8dict(tup):
-    """With a dict's items() tuple return a new dict with any utf-8
-    keys/values encoded."""
-    return dict((key.encode("utf-8"), maybe_utf8(value))
-                    for key, value in tup)
+if sys.version_info >= (3, 0):
+
+    def utf8dict(tup):
+        if not isinstance(tup, dict):
+            return dict(tup)
+        return tup
+else:
+
+    def utf8dict(tup):  # noqa
+        """With a dict's items() tuple return a new dict with any utf-8
+        keys/values encoded."""
+        return dict((key.encode("utf-8"), maybe_utf8(value))
+                        for key, value in tup)
 
 
 def extract_response(raw_response):
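
On Python 2 the keys and any unicode values are encoded to UTF-8 byte
strings; on Python 3 the function simply coerces its argument to a dict.
A small sketch of the Python 2 behaviour:

.. code-block:: python

    >>> utf8dict([(u"note", u"caf\xe9")])
    {'note': 'caf\xc3\xa9'}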

+ 4 - 4
celery/task/schedules.py

@@ -1,9 +1,9 @@
+# -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
 import warnings
-from ..schedules import schedule, crontab_parser, crontab
+from ..schedules import schedule, crontab_parser, crontab  # noqa
+from ..exceptions import CDeprecationWarning
 
-__all__ = ["schedule", "crontab_parser", "crontab"]
-
-warnings.warn(DeprecationWarning(
+warnings.warn(CDeprecationWarning(
     "celery.task.schedules is deprecated and renamed to celery.schedules"))

+ 22 - 4
celery/task/sets.py

@@ -1,3 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+    celery.task.sets
+    ~~~~~~~~~~~~~~~~
+
+    Creating and applying groups of tasks.
+
+    :copyright: (c) 2009 - 2011 by Ask Solem.
+    :license: BSD, see LICENSE for more details.
+
+"""
 from __future__ import absolute_import
 from __future__ import with_statement
 
@@ -6,6 +17,7 @@ import warnings
 from .. import registry
 from ..app import app_or_default
 from ..datastructures import AttributeDict
+from ..exceptions import CDeprecationWarning
 from ..utils import cached_property, reprcall, uuid
 from ..utils.compat import UserList
 
@@ -104,6 +116,12 @@ class subtask(AttributeDict):
         return registry.tasks[self.task]
 
 
+def maybe_subtask(t):
+    if not isinstance(t, subtask):
+        return subtask(t)
+    return t
+
+
 class TaskSet(UserList):
     """A task containing several subtasks, making it possible
     to track how many, or when all of the tasks have been completed.
@@ -128,7 +146,7 @@ class TaskSet(UserList):
         self.app = app_or_default(app)
         if task is not None:
             if hasattr(task, "__iter__"):
-                tasks = task
+                tasks = [maybe_subtask(t) for t in task]
             else:
                 # Previously TaskSet only supported applying one kind of task.
                 # the signature then was TaskSet(task, arglist),
@@ -138,7 +156,7 @@ class TaskSet(UserList):
                 self._task_name = task.name
                 warnings.warn(TASKSET_DEPRECATION_TEXT % {
                                 "cls": task.__class__.__name__},
-                              DeprecationWarning)
+                              CDeprecationWarning)
         self.data = list(tasks or [])
         self.total = len(self.tasks)
         self.Publisher = Publisher or self.app.amqp.TaskPublisher
@@ -182,12 +200,12 @@ class TaskSet(UserList):
     def task(self):
         warnings.warn(
             "TaskSet.task is deprecated and will be removed in 1.4",
-            DeprecationWarning)
+            CDeprecationWarning)
         return self._task
 
     @property
     def task_name(self):
         warnings.warn(
             "TaskSet.task_name is deprecated and will be removed in 1.4",
-            DeprecationWarning)
+            CDeprecationWarning)
         return self._task_name
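
With the new coercion a ``TaskSet`` can be built from any iterable of
subtasks (or dicts representing them); a minimal sketch, assuming a
hypothetical ``add`` task:

.. code-block:: python

    >>> from celery.task.sets import TaskSet
    >>> from tasks import add                   # hypothetical task
    >>> ts = TaskSet(add.subtask((i, i)) for i in xrange(3))
    >>> ts.apply_async().join()
    [0, 2, 4]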

+ 3 - 0
celery/tests/__init__.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import logging
 import os
 import sys
@@ -11,6 +13,7 @@ os.environ.setdefault("CELERY_CONFIG_MODULE", config_module)
 os.environ["CELERY_LOADER"] = "default"
 os.environ["EVENTLET_NOPATCH"] = "yes"
 os.environ["GEVENT_NOPATCH"] = "yes"
+os.environ["KOMBU_DISABLE_LIMIT_PROTECTION"] = "yes"
 
 try:
     WindowsError = WindowsError  # noqa

+ 2 - 0
celery/tests/compat.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import sys
 
 

+ 2 - 0
celery/tests/config.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import os
 
 BROKER_TRANSPORT = "memory"

+ 2 - 0
celery/tests/functional/case.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import atexit
 import logging
 import os

+ 2 - 0
celery/tests/functional/tasks.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import time
 
 from celery.task import task

+ 324 - 0
celery/tests/test_app/__init__.py

@@ -0,0 +1,324 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import os
+import sys
+
+from mock import Mock
+
+from celery import Celery
+from celery import app as _app
+from celery.app import defaults
+from celery.app.base import BaseApp
+from celery.loaders.base import BaseLoader
+from celery.platforms import pyimplementation
+from celery.utils.serialization import pickle
+
+from celery.tests import config
+from celery.tests.utils import (unittest, mask_modules, platform_pyimp,
+                                sys_platform, pypy_version)
+from celery.utils.mail import ErrorMail
+from kombu.utils import gen_unique_id
+
+THIS_IS_A_KEY = "this is a value"
+
+
+class Object(object):
+
+    def __init__(self, **kwargs):
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+
+
+def _get_test_config():
+    return dict((key, getattr(config, key))
+                    for key in dir(config)
+                        if key.isupper() and not key.startswith("_"))
+
+test_config = _get_test_config()
+
+
+class test_App(unittest.TestCase):
+
+    def setUp(self):
+        self.app = Celery(set_as_current=False)
+        self.app.conf.update(test_config)
+
+    def test_task(self):
+        app = Celery("foozibari", set_as_current=False)
+
+        def fun():
+            pass
+
+        fun.__module__ = "__main__"
+        task = app.task(fun)
+        self.assertEqual(task.name, app.main + ".fun")
+
+    def test_repr(self):
+        self.assertTrue(repr(self.app))
+
+    def test_TaskSet(self):
+        ts = self.app.TaskSet()
+        self.assertListEqual(ts.tasks, [])
+        self.assertIs(ts.app, self.app)
+
+    def test_pickle_app(self):
+        changes = dict(THE_FOO_BAR="bars",
+                       THE_MII_MAR="jars")
+        self.app.conf.update(changes)
+        saved = pickle.dumps(self.app)
+        self.assertLess(len(saved), 2048)
+        restored = pickle.loads(saved)
+        self.assertDictContainsSubset(changes, restored.conf)
+
+    def test_worker_main(self):
+        from celery.bin import celeryd
+
+        class WorkerCommand(celeryd.WorkerCommand):
+
+            def execute_from_commandline(self, argv):
+                return argv
+
+        prev, celeryd.WorkerCommand = celeryd.WorkerCommand, WorkerCommand
+        try:
+            ret = self.app.worker_main(argv=["--version"])
+            self.assertListEqual(ret, ["--version"])
+        finally:
+            celeryd.WorkerCommand = prev
+
+    def test_config_from_envvar(self):
+        os.environ["CELERYTEST_CONFIG_OBJECT"] = "celery.tests.test_app"
+        self.app.config_from_envvar("CELERYTEST_CONFIG_OBJECT")
+        self.assertEqual(self.app.conf.THIS_IS_A_KEY, "this is a value")
+
+    def test_config_from_object(self):
+
+        class Object(object):
+            LEAVE_FOR_WORK = True
+            MOMENT_TO_STOP = True
+            CALL_ME_BACK = 123456789
+            WANT_ME_TO = False
+            UNDERSTAND_ME = True
+
+        self.app.config_from_object(Object())
+
+        self.assertTrue(self.app.conf.LEAVE_FOR_WORK)
+        self.assertTrue(self.app.conf.MOMENT_TO_STOP)
+        self.assertEqual(self.app.conf.CALL_ME_BACK, 123456789)
+        self.assertFalse(self.app.conf.WANT_ME_TO)
+        self.assertTrue(self.app.conf.UNDERSTAND_ME)
+
+    def test_config_from_cmdline(self):
+        cmdline = [".always_eager=no",
+                   ".result_backend=/dev/null",
+                   '.task_error_whitelist=(list)["a", "b", "c"]',
+                   "celeryd.prefetch_multiplier=368",
+                   ".foobarstring=(string)300",
+                   ".foobarint=(int)300",
+                   '.result_engine_options=(dict){"foo": "bar"}']
+        self.app.config_from_cmdline(cmdline, namespace="celery")
+        self.assertFalse(self.app.conf.CELERY_ALWAYS_EAGER)
+        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "/dev/null")
+        self.assertEqual(self.app.conf.CELERYD_PREFETCH_MULTIPLIER, 368)
+        self.assertListEqual(self.app.conf.CELERY_TASK_ERROR_WHITELIST,
+                             ["a", "b", "c"])
+        self.assertEqual(self.app.conf.CELERY_FOOBARSTRING, "300")
+        self.assertEqual(self.app.conf.CELERY_FOOBARINT, 300)
+        self.assertDictEqual(self.app.conf.CELERY_RESULT_ENGINE_OPTIONS,
+                             {"foo": "bar"})
+
+    def test_compat_setting_CELERY_BACKEND(self):
+
+        self.app.config_from_object(Object(CELERY_BACKEND="set_by_us"))
+        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "set_by_us")
+
+    def test_setting_BROKER_TRANSPORT_OPTIONS(self):
+
+        _args = {'foo': 'bar', 'spam': 'baz'}
+
+        self.app.config_from_object(Object())
+        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, {})
+
+        self.app.config_from_object(Object(BROKER_TRANSPORT_OPTIONS=_args))
+        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, _args)
+
+    def test_Windows_log_color_disabled(self):
+        self.app.IS_WINDOWS = True
+        self.assertFalse(self.app.log.supports_color())
+
+    def test_compat_setting_CARROT_BACKEND(self):
+        self.app.config_from_object(Object(CARROT_BACKEND="set_by_us"))
+        self.assertEqual(self.app.conf.BROKER_TRANSPORT, "set_by_us")
+
+    def test_mail_admins(self):
+
+        class Loader(BaseLoader):
+
+            def mail_admins(*args, **kwargs):
+                return args, kwargs
+
+        self.app.loader = Loader()
+        self.app.conf.ADMINS = None
+        self.assertFalse(self.app.mail_admins("Subject", "Body"))
+        self.app.conf.ADMINS = [("George Costanza", "george@vandelay.com")]
+        self.assertTrue(self.app.mail_admins("Subject", "Body"))
+
+    def test_amqp_get_broker_info(self):
+        self.assertDictContainsSubset({"hostname": "localhost",
+                                       "userid": "guest",
+                                       "password": "guest",
+                                       "virtual_host": "/"},
+                                      self.app.broker_connection(
+                                          transport="amqplib").info())
+        self.app.conf.BROKER_PORT = 1978
+        self.app.conf.BROKER_VHOST = "foo"
+        self.assertDictContainsSubset({"port": 1978,
+                                       "virtual_host": "foo"},
+                                      self.app.broker_connection(
+                                          transport="amqplib").info())
+        conn = self.app.broker_connection(virtual_host="/value")
+        self.assertDictContainsSubset({"virtual_host": "/value"},
+                                      conn.info())
+
+    def test_BROKER_BACKEND_alias(self):
+        self.assertEqual(self.app.conf.BROKER_BACKEND,
+                         self.app.conf.BROKER_TRANSPORT)
+
+    def test_with_default_connection(self):
+
+        @self.app.with_default_connection
+        def handler(connection=None, foo=None):
+            return connection, foo
+
+        connection, foo = handler(foo=42)
+        self.assertEqual(foo, 42)
+        self.assertTrue(connection)
+
+    def test_after_fork(self):
+        p = self.app._pool = Mock()
+        self.app._after_fork(self.app)
+        p.force_close_all.assert_called_with()
+        self.assertIsNone(self.app._pool)
+        self.app._after_fork(self.app)
+
+    def test_pool_no_multiprocessing(self):
+        with mask_modules("multiprocessing.util"):
+            pool = self.app.pool
+            self.assertIs(pool, self.app._pool)
+
+    def test_bugreport(self):
+        self.assertTrue(self.app.bugreport())
+
+    def test_send_task_sent_event(self):
+        from celery.app import amqp
+
+        class Dispatcher(object):
+            sent = []
+
+            def send(self, type, **fields):
+                self.sent.append((type, fields))
+
+        conn = self.app.broker_connection()
+        chan = conn.channel()
+        try:
+            for e in ("foo_exchange", "moo_exchange", "bar_exchange"):
+                chan.exchange_declare(e, "direct", durable=True)
+                chan.queue_declare(e, durable=True)
+                chan.queue_bind(e, e, e)
+        finally:
+            chan.close()
+        assert conn.transport_cls == "memory"
+
+        pub = self.app.amqp.TaskPublisher(conn, exchange="foo_exchange")
+        self.assertIn("foo_exchange", amqp._exchanges_declared)
+
+        dispatcher = Dispatcher()
+        self.assertTrue(pub.delay_task("footask", (), {},
+                                       exchange="moo_exchange",
+                                       routing_key="moo_exchange",
+                                       event_dispatcher=dispatcher))
+        self.assertTrue(dispatcher.sent)
+        self.assertEqual(dispatcher.sent[0][0], "task-sent")
+        self.assertTrue(pub.delay_task("footask", (), {},
+                                       event_dispatcher=dispatcher,
+                                       exchange="bar_exchange",
+                                       routing_key="bar_exchange"))
+        self.assertIn("bar_exchange", amqp._exchanges_declared)
+
+    def test_error_mail_sender(self):
+        x = ErrorMail.subject % {"name": "task_name",
+                                 "id": gen_unique_id(),
+                                 "exc": "FOOBARBAZ",
+                                 "hostname": "lana"}
+        self.assertTrue(x)
+
+
+class test_BaseApp(unittest.TestCase):
+
+    def test_on_init(self):
+        BaseApp()
+
+
+class test_defaults(unittest.TestCase):
+
+    def test_str_to_bool(self):
+        for s in ("false", "no", "0"):
+            self.assertFalse(defaults.str_to_bool(s))
+        for s in ("true", "yes", "1"):
+            self.assertTrue(defaults.str_to_bool(s))
+        with self.assertRaises(TypeError):
+            defaults.str_to_bool("unsure")
+
+
+class test_debugging_utils(unittest.TestCase):
+
+    def test_enable_disable_trace(self):
+        try:
+            _app.enable_trace()
+            self.assertEqual(_app.app_or_default, _app._app_or_default_trace)
+            _app.disable_trace()
+            self.assertEqual(_app.app_or_default, _app._app_or_default)
+        finally:
+            _app.disable_trace()
+
+
+class test_compilation(unittest.TestCase):
+    _clean = ("celery.app.base", )
+
+    def setUp(self):
+        self._prev = dict((k, sys.modules.pop(k, None)) for k in self._clean)
+
+    def tearDown(self):
+        sys.modules.update(self._prev)
+
+    def test_kombu_version_check(self):
+        import kombu
+        kombu.VERSION = (0, 9, 9)
+        with self.assertRaises(ImportError):
+            __import__("celery.app.base")
+
+
+class test_pyimplementation(unittest.TestCase):
+
+    def test_platform_python_implementation(self):
+        with platform_pyimp(lambda: "Xython"):
+            self.assertEqual(pyimplementation(), "Xython")
+
+    def test_platform_jython(self):
+        with platform_pyimp():
+            with sys_platform("java 1.6.51"):
+                self.assertIn("Jython", pyimplementation())
+
+    def test_platform_pypy(self):
+        with platform_pyimp():
+            with sys_platform("darwin"):
+                with pypy_version((1, 4, 3)):
+                    self.assertIn("PyPy", pyimplementation())
+                with pypy_version((1, 4, 3, "a4")):
+                    self.assertIn("PyPy", pyimplementation())
+
+    def test_platform_fallback(self):
+        with platform_pyimp():
+            with sys_platform("darwin"):
+                with pypy_version():
+                    self.assertEqual("CPython", pyimplementation())

+ 0 - 313
celery/tests/test_app/test_app.py

@@ -1,313 +0,0 @@
-from __future__ import with_statement
-
-import os
-import sys
-
-from mock import Mock
-
-from celery import Celery
-from celery import app as _app
-from celery.app import defaults
-from celery.app.base import BaseApp, pyimplementation
-from celery.loaders.base import BaseLoader
-from celery.utils.serialization import pickle
-
-from celery.tests import config
-from celery.tests.utils import (unittest, mask_modules, platform_pyimp,
-                                sys_platform, pypy_version)
-
-THIS_IS_A_KEY = "this is a value"
-
-
-class Object(object):
-
-    def __init__(self, **kwargs):
-        for key, value in kwargs.items():
-            setattr(self, key, value)
-
-
-def _get_test_config():
-    return dict((key, getattr(config, key))
-                    for key in dir(config)
-                        if key.isupper() and not key.startswith("_"))
-
-test_config = _get_test_config()
-
-
-class test_App(unittest.TestCase):
-
-    def setUp(self):
-        self.app = Celery(set_as_current=False)
-        self.app.conf.update(test_config)
-
-    def test_task(self):
-        app = Celery("foozibari", set_as_current=False)
-
-        def fun():
-            pass
-
-        fun.__module__ = "__main__"
-        task = app.task(fun)
-        self.assertEqual(task.name, app.main + ".fun")
-
-    def test_repr(self):
-        self.assertTrue(repr(self.app))
-
-    def test_TaskSet(self):
-        ts = self.app.TaskSet()
-        self.assertListEqual(ts.tasks, [])
-        self.assertIs(ts.app, self.app)
-
-    def test_pickle_app(self):
-        changes = dict(THE_FOO_BAR="bars",
-                       THE_MII_MAR="jars")
-        self.app.conf.update(changes)
-        saved = pickle.dumps(self.app)
-        self.assertLess(len(saved), 2048)
-        restored = pickle.loads(saved)
-        self.assertDictContainsSubset(changes, restored.conf)
-
-    def test_worker_main(self):
-        from celery.bin import celeryd
-
-        class WorkerCommand(celeryd.WorkerCommand):
-
-            def execute_from_commandline(self, argv):
-                return argv
-
-        prev, celeryd.WorkerCommand = celeryd.WorkerCommand, WorkerCommand
-        try:
-            ret = self.app.worker_main(argv=["--version"])
-            self.assertListEqual(ret, ["--version"])
-        finally:
-            celeryd.WorkerCommand = prev
-
-    def test_config_from_envvar(self):
-        os.environ["CELERYTEST_CONFIG_OBJECT"] = \
-                "celery.tests.test_app.test_app"
-        self.app.config_from_envvar("CELERYTEST_CONFIG_OBJECT")
-        self.assertEqual(self.app.conf.THIS_IS_A_KEY, "this is a value")
-
-    def test_config_from_object(self):
-
-        class Object(object):
-            LEAVE_FOR_WORK = True
-            MOMENT_TO_STOP = True
-            CALL_ME_BACK = 123456789
-            WANT_ME_TO = False
-            UNDERSTAND_ME = True
-
-        self.app.config_from_object(Object())
-
-        self.assertTrue(self.app.conf.LEAVE_FOR_WORK)
-        self.assertTrue(self.app.conf.MOMENT_TO_STOP)
-        self.assertEqual(self.app.conf.CALL_ME_BACK, 123456789)
-        self.assertFalse(self.app.conf.WANT_ME_TO)
-        self.assertTrue(self.app.conf.UNDERSTAND_ME)
-
-    def test_config_from_cmdline(self):
-        cmdline = [".always_eager=no",
-                   ".result_backend=/dev/null",
-                   '.task_error_whitelist=(list)["a", "b", "c"]',
-                   "celeryd.prefetch_multiplier=368",
-                   ".foobarstring=(string)300",
-                   ".foobarint=(int)300",
-                   '.result_engine_options=(dict){"foo": "bar"}']
-        self.app.config_from_cmdline(cmdline, namespace="celery")
-        self.assertFalse(self.app.conf.CELERY_ALWAYS_EAGER)
-        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "/dev/null")
-        self.assertEqual(self.app.conf.CELERYD_PREFETCH_MULTIPLIER, 368)
-        self.assertListEqual(self.app.conf.CELERY_TASK_ERROR_WHITELIST,
-                             ["a", "b", "c"])
-        self.assertEqual(self.app.conf.CELERY_FOOBARSTRING, "300")
-        self.assertEqual(self.app.conf.CELERY_FOOBARINT, 300)
-        self.assertDictEqual(self.app.conf.CELERY_RESULT_ENGINE_OPTIONS,
-                             {"foo": "bar"})
-
-    def test_compat_setting_CELERY_BACKEND(self):
-
-        self.app.config_from_object(Object(CELERY_BACKEND="set_by_us"))
-        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "set_by_us")
-
-    def test_setting_BROKER_TRANSPORT_OPTIONS(self):
-
-        _args = {'foo': 'bar', 'spam': 'baz'}
-
-        self.app.config_from_object(Object())
-        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, {})
-
-        self.app.config_from_object(Object(BROKER_TRANSPORT_OPTIONS=_args))
-        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, _args)
-
-    def test_Windows_log_color_disabled(self):
-        self.app.IS_WINDOWS = True
-        self.assertFalse(self.app.log.supports_color())
-
-    def test_compat_setting_CARROT_BACKEND(self):
-        self.app.config_from_object(Object(CARROT_BACKEND="set_by_us"))
-        self.assertEqual(self.app.conf.BROKER_TRANSPORT, "set_by_us")
-
-    def test_mail_admins(self):
-
-        class Loader(BaseLoader):
-
-            def mail_admins(*args, **kwargs):
-                return args, kwargs
-
-        self.app.loader = Loader()
-        self.app.conf.ADMINS = None
-        self.assertFalse(self.app.mail_admins("Subject", "Body"))
-        self.app.conf.ADMINS = [("George Costanza", "george@vandelay.com")]
-        self.assertTrue(self.app.mail_admins("Subject", "Body"))
-
-    def test_amqp_get_broker_info(self):
-        self.assertDictContainsSubset({"hostname": "localhost",
-                                       "userid": "guest",
-                                       "password": "guest",
-                                       "virtual_host": "/"},
-                                      self.app.broker_connection(
-                                          transport="amqplib").info())
-        self.app.conf.BROKER_PORT = 1978
-        self.app.conf.BROKER_VHOST = "foo"
-        self.assertDictContainsSubset({"port": 1978,
-                                       "virtual_host": "foo"},
-                                      self.app.broker_connection(
-                                          transport="amqplib").info())
-        conn = self.app.broker_connection(virtual_host="/value")
-        self.assertDictContainsSubset({"virtual_host": "/value"},
-                                      conn.info())
-
-    def test_BROKER_BACKEND_alias(self):
-        self.assertEqual(self.app.conf.BROKER_BACKEND,
-                         self.app.conf.BROKER_TRANSPORT)
-
-    def test_with_default_connection(self):
-
-        @self.app.with_default_connection
-        def handler(connection=None, foo=None):
-            return connection, foo
-
-        connection, foo = handler(foo=42)
-        self.assertEqual(foo, 42)
-        self.assertTrue(connection)
-
-    def test_after_fork(self):
-        p = self.app._pool = Mock()
-        self.app._after_fork(self.app)
-        p.force_close_all.assert_called_with()
-        self.assertIsNone(self.app._pool)
-        self.app._after_fork(self.app)
-
-    def test_pool_no_multiprocessing(self):
-        with mask_modules("multiprocessing.util"):
-            pool = self.app.pool
-            self.assertIs(pool, self.app._pool)
-
-    def test_bugreport(self):
-        self.assertTrue(self.app.bugreport())
-
-    def test_send_task_sent_event(self):
-        from celery.app import amqp
-
-        class Dispatcher(object):
-            sent = []
-
-            def send(self, type, **fields):
-                self.sent.append((type, fields))
-
-        conn = self.app.broker_connection()
-        chan = conn.channel()
-        try:
-            for e in ("foo_exchange", "moo_exchange", "bar_exchange"):
-                chan.exchange_declare(e, "direct", durable=True)
-                chan.queue_declare(e, durable=True)
-                chan.queue_bind(e, e, e)
-        finally:
-            chan.close()
-        assert conn.transport_cls == "memory"
-
-        pub = self.app.amqp.TaskPublisher(conn, exchange="foo_exchange")
-        self.assertIn("foo_exchange", amqp._exchanges_declared)
-
-        dispatcher = Dispatcher()
-        self.assertTrue(pub.delay_task("footask", (), {},
-                                       exchange="moo_exchange",
-                                       routing_key="moo_exchange",
-                                       event_dispatcher=dispatcher))
-        self.assertTrue(dispatcher.sent)
-        self.assertEqual(dispatcher.sent[0][0], "task-sent")
-        self.assertTrue(pub.delay_task("footask", (), {},
-                                       event_dispatcher=dispatcher,
-                                       exchange="bar_exchange",
-                                       routing_key="bar_exchange"))
-        self.assertIn("bar_exchange", amqp._exchanges_declared)
-
-
-class test_BaseApp(unittest.TestCase):
-
-    def test_on_init(self):
-        BaseApp()
-
-
-class test_defaults(unittest.TestCase):
-
-    def test_str_to_bool(self):
-        for s in ("false", "no", "0"):
-            self.assertFalse(defaults.str_to_bool(s))
-        for s in ("true", "yes", "1"):
-            self.assertTrue(defaults.str_to_bool(s))
-        self.assertRaises(TypeError, defaults.str_to_bool, "unsure")
-
-
-class test_debugging_utils(unittest.TestCase):
-
-    def test_enable_disable_trace(self):
-        try:
-            _app.enable_trace()
-            self.assertEqual(_app.app_or_default, _app._app_or_default_trace)
-            _app.disable_trace()
-            self.assertEqual(_app.app_or_default, _app._app_or_default)
-        finally:
-            _app.disable_trace()
-
-
-class test_compilation(unittest.TestCase):
-    _clean = ("celery.app.base", )
-
-    def setUp(self):
-        self._prev = dict((k, sys.modules.pop(k, None)) for k in self._clean)
-
-    def tearDown(self):
-        sys.modules.update(self._prev)
-
-    def test_kombu_version_check(self):
-        import kombu
-        kombu.VERSION = (0, 9, 9)
-        with self.assertRaises(ImportError):
-            __import__("celery.app.base")
-
-
-class test_pyimplementation(unittest.TestCase):
-
-    def test_platform_python_implementation(self):
-        with platform_pyimp(lambda: "Xython"):
-            self.assertEqual(pyimplementation(), "Xython")
-
-    def test_platform_jython(self):
-        with platform_pyimp():
-            with sys_platform("java 1.6.51"):
-                self.assertIn("Jython", pyimplementation())
-
-    def test_platform_pypy(self):
-        with platform_pyimp():
-            with sys_platform("darwin"):
-                with pypy_version((1, 4, 3)):
-                    self.assertIn("PyPy", pyimplementation())
-                with pypy_version((1, 4, 3, "a4")):
-                    self.assertIn("PyPy", pyimplementation())
-
-    def test_platform_fallback(self):
-        with platform_pyimp():
-            with sys_platform("darwin"):
-                with pypy_version():
-                    self.assertEqual("CPython", pyimplementation())

+ 2 - 2
celery/tests/test_app/test_app_amqp.py

@@ -1,10 +1,10 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 from mock import Mock
 
-from celery.tests.utils import AppCase
-
 from celery.app.amqp import MSG_OPTIONS, extract_msg_options
+from celery.tests.utils import AppCase
 
 
 class TestMsgOptions(AppCase):

+ 12 - 11
celery/tests/test_app/test_beat.py

@@ -1,8 +1,8 @@
+from __future__ import absolute_import
+
 import logging
-from celery.tests.utils import unittest
 
 from datetime import datetime, timedelta
-
 from nose import SkipTest
 
 from celery import beat
@@ -11,6 +11,7 @@ from celery.result import AsyncResult
 from celery.schedules import schedule
 from celery.task.base import Task
 from celery.utils import uuid
+from celery.tests.utils import unittest
 
 
 class Object(object):
@@ -191,7 +192,7 @@ class test_Scheduler(unittest.TestCase):
         self.assertTrue(scheduler.logger.logged[0])
         level, msg, args, kwargs = scheduler.logger.logged[0]
         self.assertEqual(level, logging.ERROR)
-        self.assertIn("Couldn't apply scheduled task", args[0].message)
+        self.assertIn("Couldn't apply scheduled task", args[0].args[0])
 
     def test_due_tick_RuntimeError(self):
         scheduler = mSchedulerRuntimeError()
@@ -262,7 +263,7 @@ class test_Service(unittest.TestCase):
                 if self.tick_raises_exit:
                     raise SystemExit()
                 if self.shutdown_service:
-                    self.shutdown_service._shutdown.set()
+                    self.shutdown_service._is_shutdown.set()
                 return 0.0
 
         return beat.Service(scheduler_cls=PersistentScheduler), sh
@@ -279,12 +280,12 @@ class test_Service(unittest.TestCase):
         s.sync()
         self.assertTrue(sh.closed)
         self.assertTrue(sh.synced)
-        self.assertTrue(s._stopped.isSet())
+        self.assertTrue(s._is_stopped.isSet())
         s.sync()
         s.stop(wait=False)
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())
         s.stop(wait=True)
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())
 
         p = s.scheduler._store
         s.scheduler._store = None
@@ -295,25 +296,25 @@ class test_Service(unittest.TestCase):
 
     def test_start_embedded_process(self):
         s, sh = self.get_service()
-        s._shutdown.set()
+        s._is_shutdown.set()
         s.start(embedded_process=True)
 
     def test_start_thread(self):
         s, sh = self.get_service()
-        s._shutdown.set()
+        s._is_shutdown.set()
         s.start(embedded_process=False)
 
     def test_start_tick_raises_exit_error(self):
         s, sh = self.get_service()
         s.scheduler.tick_raises_exit = True
         s.start()
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())
 
     def test_start_manages_one_tick_before_shutdown(self):
         s, sh = self.get_service()
         s.scheduler.shutdown_service = s
         s.start()
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())
 
 
 class test_EmbeddedService(unittest.TestCase):

+ 1 - 0
celery/tests/test_app/test_celery.py

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from celery.tests.utils import unittest
 
 import celery

+ 30 - 15
celery/tests/test_app/test_loaders.py

@@ -1,18 +1,20 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import os
 import sys
+import warnings
 
 from celery import task
 from celery import loaders
 from celery.app import app_or_default
-from celery.exceptions import ImproperlyConfigured
+from celery.exceptions import CPendingDeprecationWarning, ImproperlyConfigured
 from celery.loaders import base
 from celery.loaders import default
 from celery.loaders.app import AppLoader
 
 from celery.tests.compat import catch_warnings
-from celery.tests.utils import unittest, AppCase, with_environ
+from celery.tests.utils import unittest, AppCase
 
 
 class ObjectConfig(object):
@@ -66,10 +68,22 @@ class TestLoaders(AppCase):
                           default.Loader)
 
     def test_current_loader(self):
-        self.assertIs(loaders.current_loader(), self.app.loader)
+        warnings.resetwarnings()
+        with catch_warnings(record=True) as log:
+            self.assertIs(loaders.current_loader(), self.app.loader)
+            warning = log[0].message
+
+            self.assertIsInstance(warning, CPendingDeprecationWarning)
+            self.assertIn("deprecation", warning.args[0])
 
     def test_load_settings(self):
-        self.assertIs(loaders.load_settings(), self.app.conf)
+        warnings.resetwarnings()
+        with catch_warnings(record=True) as log:
+            self.assertIs(loaders.load_settings(), self.app.conf)
+            warning = log[0].message
+
+            self.assertIsInstance(warning, CPendingDeprecationWarning)
+            self.assertIn("deprecation", warning.args[0])
 
 
 class TestLoaderBase(unittest.TestCase):
@@ -99,8 +113,10 @@ class TestLoaderBase(unittest.TestCase):
         self.assertEqual(self.loader.conf["foo"], "bar")
 
     def test_import_default_modules(self):
-        self.assertEqual(sorted(self.loader.import_default_modules()),
-                         sorted([os, sys, task]))
+        modnames = lambda l: [m.__name__ for m in l]
+        self.assertEqual(sorted(modnames(
+                            self.loader.import_default_modules())),
+                         sorted(modnames([os, sys, task])))
 
     def test_import_from_cwd_custom_imp(self):
 
@@ -122,8 +138,8 @@ class TestLoaderBase(unittest.TestCase):
             self.assertIsInstance(warning, MockMail.SendmailWarning)
             self.assertIn("KeyError", warning.args[0])
 
-            self.assertRaises(KeyError, self.loader.mail_admins,
-                              fail_silently=False, **opts)
+            with self.assertRaises(KeyError):
+                self.loader.mail_admins(fail_silently=False, **opts)
 
     def test_mail_admins(self):
         MockMail.Mailer.raise_on_send = False
@@ -139,8 +155,8 @@ class TestLoaderBase(unittest.TestCase):
         self.assertIs(loader.mail, mail)
 
     def test_cmdline_config_ValueError(self):
-        self.assertRaises(ValueError, self.loader.cmdline_config_parser,
-                         ["broker.port=foobar"])
+        with self.assertRaises(ValueError):
+            self.loader.cmdline_config_parser(["broker.port=foobar"])
 
 
 class TestDefaultLoader(unittest.TestCase):
@@ -216,17 +232,16 @@ class test_AppLoader(unittest.TestCase):
     def test_config_from_envvar(self, key="CELERY_HARNESS_CFG1"):
         self.assertFalse(self.loader.config_from_envvar("HDSAJIHWIQHEWQU",
                                                         silent=True))
-        self.assertRaises(ImproperlyConfigured,
-                          self.loader.config_from_envvar, "HDSAJIHWIQHEWQU",
-                          silent=False)
+        with self.assertRaises(ImproperlyConfigured):
+            self.loader.config_from_envvar("HDSAJIHWIQHEWQU", silent=False)
         os.environ[key] = __name__ + ".object_config"
         self.assertTrue(self.loader.config_from_envvar(key))
         self.assertEqual(self.loader.conf["FOO"], 1)
         self.assertEqual(self.loader.conf["BAR"], 2)
 
         os.environ[key] = "unknown_asdwqe.asdwqewqe"
-        self.assertRaises(ImportError,
-                          self.loader.config_from_envvar, key, silent=False)
+        with self.assertRaises(ImportError):
+            self.loader.config_from_envvar(key, silent=False)
         self.assertFalse(self.loader.config_from_envvar(key, silent=True))
 
         os.environ[key] = __name__ + ".dict_config"
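
The reworked loader tests assert that the old module-level accessors now emit CPendingDeprecationWarning before delegating to the bound app. The idiom is catch_warnings(record=True), which collects emitted warnings into a list for inspection (the tests use celery.tests.compat.catch_warnings for older Pythons). A standalone sketch with a hypothetical deprecated accessor:

    from __future__ import with_statement

    import warnings

    def deprecated_accessor():
        # Hypothetical stand-in for an API that warns before returning.
        warnings.warn(PendingDeprecationWarning(
            "scheduled for deprecation, use the app instance instead"))
        return object()

    warnings.resetwarnings()
    with warnings.catch_warnings(record=True) as log:
        warnings.simplefilter("always")  # make sure nothing is suppressed
        deprecated_accessor()
        warning = log[0].message         # the warning instance itself
        assert isinstance(warning, PendingDeprecationWarning)
        assert "deprecation" in warning.args[0]

The explicit resetwarnings() matters because PendingDeprecationWarning is ignored by default, so a stale filter would leave the log empty.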

+ 2 - 1
celery/tests/test_compat/test_log.py → celery/tests/test_app/test_log.py

@@ -1,8 +1,8 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
 import logging
-from celery.tests.utils import unittest
 from tempfile import mktemp
 
 from celery import log
@@ -12,6 +12,7 @@ from celery.log import (setup_logger, setup_task_logger,
                         setup_logging_subsystem)
 from celery.utils import uuid
 from celery.utils.compat import _CompatLoggerAdapter
+from celery.tests.utils import unittest
 from celery.tests.utils import (override_stdouts, wrap_logger,
                                 get_handlers, set_handlers)
 

+ 8 - 4
celery/tests/test_app/test_routes.py

@@ -1,3 +1,6 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
 from functools import wraps
 
 from celery import routes
@@ -65,7 +68,8 @@ class test_MapRoute(unittest.TestCase):
     def test_expand_route_not_found(self):
         expand = E(current_app.conf.CELERY_QUEUES)
         route = routes.MapRoute({"a": {"queue": "x"}})
-        self.assertRaises(QueueNotFound, expand, route.route_for_task("a"))
+        with self.assertRaises(QueueNotFound):
+            expand(route.route_for_task("a"))
 
 
 class test_lookup_route(unittest.TestCase):
@@ -124,14 +128,14 @@ class test_lookup_route(unittest.TestCase):
 class test_prepare(unittest.TestCase):
 
     def test_prepare(self):
-        from celery.datastructures import LocalCache
+        from celery.datastructures import LRUCache
         o = object()
         R = [{"foo": "bar"},
-                  "celery.datastructures.LocalCache",
+                  "celery.datastructures.LRUCache",
                   o]
         p = routes.prepare(R)
         self.assertIsInstance(p[0], routes.MapRoute)
-        self.assertIsInstance(maybe_promise(p[1]), LocalCache)
+        self.assertIsInstance(maybe_promise(p[1]), LRUCache)
         self.assertIs(p[2], o)
 
         self.assertEqual(routes.prepare(o), [o])
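
LocalCache was renamed to LRUCache in celery.datastructures, so the prepare() test now resolves the new dotted path. For orientation, this is roughly the structure an LRU cache provides: a sketch assuming Python 2.7's collections.OrderedDict (celery ships its own compat implementation), not the actual celery code:

    from collections import OrderedDict

    class LRUCache(object):
        """Sketch: mapping that evicts the least recently used
        key once `limit` entries are stored."""

        def __init__(self, limit=None):
            self.limit = limit
            self.data = OrderedDict()

        def __getitem__(self, key):
            value = self.data.pop(key)  # re-insert to mark as recently used
            self.data[key] = value
            return value

        def __setitem__(self, key, value):
            if key in self.data:
                self.data.pop(key)
            elif self.limit and len(self.data) >= self.limit:
                self.data.popitem(last=False)  # evict the oldest entry
            self.data[key] = value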

+ 9 - 7
celery/tests/test_backends/__init__.py

@@ -1,10 +1,10 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
-from celery.tests.utils import unittest
-
 from celery import backends
 from celery.backends.amqp import AMQPBackend
 from celery.backends.cache import CacheBackend
+from celery.tests.utils import unittest
 
 
 class TestBackends(unittest.TestCase):
@@ -17,11 +17,13 @@ class TestBackends(unittest.TestCase):
                                   expect_cls)
 
     def test_get_backend_cache(self):
-        backends._backend_cache = {}
-        backends.get_backend_cls("amqp")
-        self.assertIn("amqp", backends._backend_cache)
-        amqp_backend = backends.get_backend_cls("amqp")
-        self.assertIs(amqp_backend, backends._backend_cache["amqp"])
+        backends.get_backend_cls.clear()
+        hits = backends.get_backend_cls.hits
+        misses = backends.get_backend_cls.misses
+        self.assertTrue(backends.get_backend_cls("amqp"))
+        self.assertEqual(backends.get_backend_cls.misses, misses + 1)
+        self.assertTrue(backends.get_backend_cls("amqp"))
+        self.assertEqual(backends.get_backend_cls.hits, hits + 1)
 
     def test_unknown_backend(self):
         with self.assertRaises(ValueError):
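
backends.get_backend_cls is now memoized, which is why the test calls clear() and inspects the hits/misses counters instead of resetting a module-level _backend_cache dict. A minimal sketch of a memoizing decorator exposing that interface (illustrative only, not the exact celery.utils implementation; keyword arguments are ignored here):

    def memoize(fun):
        cache = {}

        def _memoized(*args):
            try:
                value = cache[args]
                _memoized.hits += 1
            except KeyError:
                value = cache[args] = fun(*args)
                _memoized.misses += 1
            return value

        _memoized.hits = _memoized.misses = 0
        _memoized.clear = cache.clear
        return _memoized

    # e.g.: get_backend_cls = memoize(get_backend_cls)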

+ 39 - 32
celery/tests/test_backends/test_amqp.py

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import socket
@@ -63,10 +64,10 @@ class test_AMQPBackend(unittest.TestCase):
             raise KeyError("foo")
         except KeyError, exception:
             einfo = ExceptionInfo(sys.exc_info())
-        tb1.mark_as_failure(tid3, exception, traceback=einfo.traceback)
-        self.assertEqual(tb2.get_status(tid3), states.FAILURE)
-        self.assertIsInstance(tb2.get_result(tid3), KeyError)
-        self.assertEqual(tb2.get_traceback(tid3), einfo.traceback)
+            tb1.mark_as_failure(tid3, exception, traceback=einfo.traceback)
+            self.assertEqual(tb2.get_status(tid3), states.FAILURE)
+            self.assertIsInstance(tb2.get_result(tid3), KeyError)
+            self.assertEqual(tb2.get_traceback(tid3), einfo.traceback)
 
     def test_repair_uuid(self):
         from celery.backends.amqp import repair_uuid
@@ -74,12 +75,12 @@ class test_AMQPBackend(unittest.TestCase):
             tid = uuid()
             self.assertEqual(repair_uuid(tid.replace("-", "")), tid)
 
-    def test_expires_defaults_to_config(self):
+    def test_expires_defaults_to_config_deprecated_setting(self):
         app = app_or_default()
         prev = app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES
         app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = 10
         try:
-            b = self.create_backend(expires=None)
+            b = self.create_backend()
             self.assertEqual(b.queue_arguments.get("x-expires"), 10 * 1000.0)
         finally:
             app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = prev
@@ -111,11 +112,11 @@ class test_AMQPBackend(unittest.TestCase):
             Producer = _Producer
 
         backend = Backend()
-        self.assertRaises(KeyError, backend.store_result,
-                          "foo", "bar", "STARTED", max_retries=None)
+        with self.assertRaises(KeyError):
+            backend.store_result("foo", "bar", "STARTED", max_retries=None)
 
-        self.assertRaises(KeyError, backend.store_result,
-                          "foo", "bar", "STARTED", max_retries=10)
+        with self.assertRaises(KeyError):
+            backend.store_result("foo", "bar", "STARTED", max_retries=10)
 
     def assertState(self, retval, state):
         self.assertEqual(retval["status"], state)
@@ -182,11 +183,14 @@ class test_AMQPBackend(unittest.TestCase):
         b = self.create_backend()
 
         tid = uuid()
-        self.assertRaises(TimeoutError, b.wait_for, tid, timeout=0.1)
+        with self.assertRaises(TimeoutError):
+            b.wait_for(tid, timeout=0.1)
         b.store_result(tid, None, states.STARTED)
-        self.assertRaises(TimeoutError, b.wait_for, tid, timeout=0.1)
+        with self.assertRaises(TimeoutError):
+            b.wait_for(tid, timeout=0.1)
         b.store_result(tid, None, states.RETRY)
-        self.assertRaises(TimeoutError, b.wait_for, tid, timeout=0.1)
+        with self.assertRaises(TimeoutError):
+            b.wait_for(tid, timeout=0.1)
         b.store_result(tid, 42, states.SUCCESS)
         self.assertEqual(b.wait_for(tid, timeout=1), 42)
         b.store_result(tid, 56, states.SUCCESS)
@@ -194,7 +198,8 @@ class test_AMQPBackend(unittest.TestCase):
                          "result is cached")
         self.assertEqual(b.wait_for(tid, timeout=1, cache=False), 56)
         b.store_result(tid, KeyError("foo"), states.FAILURE)
-        self.assertRaises(KeyError, b.wait_for, tid, timeout=1, cache=False)
+        with self.assertRaises(KeyError):
+            b.wait_for(tid, timeout=1, cache=False)
 
     def test_drain_events_remaining_timeouts(self):
 
@@ -207,8 +212,8 @@ class test_AMQPBackend(unittest.TestCase):
         with current_app.pool.acquire_channel(block=False) as (_, channel):
             binding = b._create_binding(uuid())
             consumer = b._create_consumer(binding, channel)
-            self.assertRaises(socket.timeout, b.drain_events,
-                              Connection(), consumer, timeout=0.1)
+            with self.assertRaises(socket.timeout):
+                b.drain_events(Connection(), consumer, timeout=0.1)
 
     def test_get_many(self):
         b = self.create_backend()
@@ -230,8 +235,8 @@ class test_AMQPBackend(unittest.TestCase):
         cached_res = list(b.get_many(tids, timeout=1))
         self.assertEqual(sorted(cached_res), sorted(expected_results))
         b._cache[res[0][0]]["status"] = states.RETRY
-        self.assertRaises(socket.timeout, list,
-                          b.get_many(tids, timeout=0.01))
+        with self.assertRaises(socket.timeout):
+            list(b.get_many(tids, timeout=0.01))
 
     def test_test_get_many_raises_outer_block(self):
 
@@ -241,7 +246,8 @@ class test_AMQPBackend(unittest.TestCase):
                 raise KeyError("foo")
 
         b = Backend()
-        self.assertRaises(KeyError, b.get_many(["id1"]).next)
+        with self.assertRaises(KeyError):
+            b.get_many(["id1"]).next()
 
     def test_test_get_many_raises_inner_block(self):
 
@@ -251,7 +257,8 @@ class test_AMQPBackend(unittest.TestCase):
                 raise KeyError("foo")
 
         b = Backend()
-        self.assertRaises(KeyError, b.get_many(["id1"]).next)
+        with self.assertRaises(KeyError):
+            b.get_many(["id1"]).next()
 
     def test_no_expires(self):
         b = self.create_backend(expires=None)
@@ -260,8 +267,8 @@ class test_AMQPBackend(unittest.TestCase):
         app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = None
         try:
             b = self.create_backend(expires=None)
-            self.assertRaises(KeyError, b.queue_arguments.__getitem__,
-                              "x-expires")
+            with self.assertRaises(KeyError):
+                b.queue_arguments["x-expires"]
         finally:
             app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = prev
 
@@ -269,21 +276,21 @@ class test_AMQPBackend(unittest.TestCase):
         self.create_backend().process_cleanup()
 
     def test_reload_task_result(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().reload_task_result, "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().reload_task_result("x")
 
     def test_reload_taskset_result(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().reload_taskset_result, "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().reload_taskset_result("x")
 
     def test_save_taskset(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().save_taskset, "x", "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().save_taskset("x", "x")
 
     def test_restore_taskset(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().restore_taskset, "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().restore_taskset("x")
 
     def test_delete_taskset(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().delete_taskset, "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().delete_taskset("x")
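
Most of the churn in this and the surrounding test modules is one mechanical conversion: the callable form of assertRaises becomes the context-manager form, which reads better and also covers statements that are not a single call, such as the b.queue_arguments["x-expires"] subscript above. The context-manager form needs Python 2.7's unittest (or the unittest2 backport, presumably why these modules import unittest via celery.tests.utils). Both spellings are equivalent:

    import unittest

    class ExampleTest(unittest.TestCase):

        def test_callable_form(self):
            self.assertRaises(KeyError, {}.__getitem__, "x-expires")

        def test_context_manager_form(self):
            with self.assertRaises(KeyError):
                {}["x-expires"]  # any statement, not just one call

    if __name__ == "__main__":
        unittest.main()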

+ 45 - 37
celery/tests/test_backends/test_base.py

@@ -1,10 +1,13 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
 import types
 
 from mock import Mock
+from nose import SkipTest
 
+from celery.result import AsyncResult
 from celery.utils import serialization
 from celery.utils.serialization import subclass_exception
 from celery.utils.serialization import \
@@ -25,8 +28,10 @@ class wrapobject(object):
     def __init__(self, *args, **kwargs):
         self.args = args
 
-
-Oldstyle = types.ClassType("Oldstyle", (), {})
+if sys.version_info >= (3, 0):
+    Oldstyle = None
+else:
+    Oldstyle = types.ClassType("Oldstyle", (), {})
 Unpickleable = subclass_exception("Unpickleable", KeyError, "foo.module")
 Impossible = subclass_exception("Impossible", object, "foo.module")
 Lookalike = subclass_exception("Lookalike", wrapobject, "foo.module")
@@ -45,58 +50,59 @@ class test_serialization(unittest.TestCase):
 class test_BaseBackend_interface(unittest.TestCase):
 
     def test_get_status(self):
-        self.assertRaises(NotImplementedError,
-                b.get_status, "SOMExx-N0Nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.get_status("SOMExx-N0Nex1stant-IDxx-")
 
     def test__forget(self):
-        self.assertRaises(NotImplementedError,
-                b.forget, "SOMExx-N0Nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.forget("SOMExx-N0Nex1stant-IDxx-")
 
     def test_store_result(self):
-        self.assertRaises(NotImplementedError,
-                b.store_result, "SOMExx-N0nex1stant-IDxx-", 42, states.SUCCESS)
+        with self.assertRaises(NotImplementedError):
+            b.store_result("SOMExx-N0nex1stant-IDxx-", 42, states.SUCCESS)
 
     def test_mark_as_started(self):
-        self.assertRaises(NotImplementedError,
-                b.mark_as_started, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.mark_as_started("SOMExx-N0nex1stant-IDxx-")
 
     def test_reload_task_result(self):
-        self.assertRaises(NotImplementedError,
-                b.reload_task_result, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.reload_task_result("SOMExx-N0nex1stant-IDxx-")
 
     def test_reload_taskset_result(self):
-        self.assertRaises(NotImplementedError,
-                b.reload_taskset_result, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.reload_taskset_result("SOMExx-N0nex1stant-IDxx-")
 
     def test_get_result(self):
-        self.assertRaises(NotImplementedError,
-                b.get_result, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.get_result("SOMExx-N0nex1stant-IDxx-")
 
     def test_restore_taskset(self):
-        self.assertRaises(NotImplementedError,
-                b.restore_taskset, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.restore_taskset("SOMExx-N0nex1stant-IDxx-")
 
     def test_delete_taskset(self):
-        self.assertRaises(NotImplementedError,
-                b.delete_taskset, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.delete_taskset("SOMExx-N0nex1stant-IDxx-")
 
     def test_save_taskset(self):
-        self.assertRaises(NotImplementedError,
-                b.save_taskset, "SOMExx-N0nex1stant-IDxx-", "blergh")
+        with self.assertRaises(NotImplementedError):
+            b.save_taskset("SOMExx-N0nex1stant-IDxx-", "blergh")
 
     def test_get_traceback(self):
-        self.assertRaises(NotImplementedError,
-                b.get_traceback, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.get_traceback("SOMExx-N0nex1stant-IDxx-")
 
     def test_forget(self):
-        self.assertRaises(NotImplementedError,
-                b.forget, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.forget("SOMExx-N0nex1stant-IDxx-")
 
     def test_on_chord_apply(self, unlock="celery.chord_unlock"):
         from celery.registry import tasks
         p, tasks[unlock] = tasks.get(unlock), Mock()
         try:
-            b.on_chord_apply("dakj221", "sdokqweok")
+            b.on_chord_apply("dakj221", "sdokqweok",
+                             result=map(AsyncResult, [1, 2, 3]))
             self.assertTrue(tasks[unlock].apply_async.call_count)
         finally:
             tasks[unlock] = p
@@ -105,6 +111,8 @@ class test_BaseBackend_interface(unittest.TestCase):
 class test_exception_pickle(unittest.TestCase):
 
     def test_oldstyle(self):
+        if Oldstyle is None:
+            raise SkipTest("py3k does not support old style classes")
         self.assertIsNone(fnpe(Oldstyle()))
 
     def test_BaseException(self):
@@ -272,27 +280,27 @@ class test_KeyValueStoreBackend(unittest.TestCase):
 class test_KeyValueStoreBackend_interface(unittest.TestCase):
 
     def test_get(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().get,
-                "a")
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().get("a")
 
     def test_set(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().set,
-                "a", 1)
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().set("a", 1)
 
     def test_cleanup(self):
         self.assertFalse(KeyValueStoreBackend().cleanup())
 
     def test_delete(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().delete,
-                "a")
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().delete("a")
 
     def test_mget(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().mget,
-                ["a"])
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().mget(["a"])
 
     def test_forget(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().forget,
-                "a")
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().forget("a")
 
 
 class test_DisabledBackend(unittest.TestCase):

+ 69 - 9
celery/tests/test_backends/test_cache.py

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
@@ -10,6 +11,7 @@ from celery.backends.cache import CacheBackend, DummyClient
 from celery.exceptions import ImproperlyConfigured
 from celery.result import AsyncResult
 from celery.utils import uuid
+from celery.utils.encoding import str_to_bytes
 
 from celery.tests.utils import unittest, mask_modules, reset_modules
 
@@ -53,9 +55,9 @@ class test_CacheBackend(unittest.TestCase):
             raise KeyError("foo")
         except KeyError, exception:
             pass
-        tb.mark_as_failure(tid3, exception)
-        self.assertEqual(tb.get_status(tid3), states.FAILURE)
-        self.assertIsInstance(tb.get_result(tid3), KeyError)
+            tb.mark_as_failure(tid3, exception)
+            self.assertEqual(tb.get_status(tid3), states.FAILURE)
+            self.assertIsInstance(tb.get_result(tid3), KeyError)
 
     def test_mget(self):
         tb = CacheBackend(backend="memory://")
@@ -82,20 +84,30 @@ class test_CacheBackend(unittest.TestCase):
         self.assertEqual(tb.expires, 10)
 
     def test_unknown_backend_raises_ImproperlyConfigured(self):
-        self.assertRaises(ImproperlyConfigured,
-                          CacheBackend, backend="unknown://")
+        with self.assertRaises(ImproperlyConfigured):
+            CacheBackend(backend="unknown://")
 
 
-class MyClient(DummyClient):
+class MyMemcachedStringEncodingError(Exception):
     pass
 
 
-class test_get_best_memcache(unittest.TestCase):
+class MemcachedClient(DummyClient):
+
+    def set(self, key, value, *args, **kwargs):
+        if isinstance(key, unicode):
+            raise MyMemcachedStringEncodingError(
+                    "Keys must be str()'s, not unicode.  Convert your unicode "
+                    "strings using mystring.encode(charset)!")
+        return super(MemcachedClient, self).set(key, value, *args, **kwargs)
+
+
+class MockCacheMixin(object):
 
     @contextmanager
     def mock_memcache(self):
         memcache = types.ModuleType("memcache")
-        memcache.Client = MyClient
+        memcache.Client = MemcachedClient
         memcache.Client.__module__ = memcache.__name__
         prev, sys.modules["memcache"] = sys.modules.get("memcache"), memcache
         yield True
@@ -105,7 +117,7 @@ class test_get_best_memcache(unittest.TestCase):
     @contextmanager
     def mock_pylibmc(self):
         pylibmc = types.ModuleType("pylibmc")
-        pylibmc.Client = MyClient
+        pylibmc.Client = MemcachedClient
         pylibmc.Client.__module__ = pylibmc.__name__
         prev = sys.modules.get("pylibmc")
         sys.modules["pylibmc"] = pylibmc
@@ -113,6 +125,9 @@ class test_get_best_memcache(unittest.TestCase):
         if prev is not None:
             sys.modules["pylibmc"] = prev
 
+
+class test_get_best_memcache(unittest.TestCase, MockCacheMixin):
+
     def test_pylibmc(self):
         with reset_modules("celery.backends.cache"):
             with self.mock_pylibmc():
@@ -150,3 +165,48 @@ class test_get_best_memcache(unittest.TestCase):
         from celery.backends.cache import backends
         for name, fun in backends.items():
             self.assertTrue(fun())
+
+
+class test_memcache_key(unittest.TestCase, MockCacheMixin):
+
+    def test_memcache_unicode_key(self):
+        with self.mock_memcache():
+            with reset_modules("celery.backends.cache"):
+                with mask_modules("pylibmc"):
+                    from celery.backends import cache
+                    cache._imp = [None]
+                    task_id, result = unicode(uuid()), 42
+                    b = cache.CacheBackend(backend='memcache')
+                    b.store_result(task_id, result, status=states.SUCCESS)
+                    self.assertEqual(b.get_result(task_id), result)
+
+    def test_memcache_bytes_key(self):
+        with self.mock_memcache():
+            with reset_modules("celery.backends.cache"):
+                with mask_modules("pylibmc"):
+                    from celery.backends import cache
+                    cache._imp = [None]
+                    task_id, result = str_to_bytes(uuid()), 42
+                    b = cache.CacheBackend(backend='memcache')
+                    b.store_result(task_id, result, status=states.SUCCESS)
+                    self.assertEqual(b.get_result(task_id), result)
+
+    def test_pylibmc_unicode_key(self):
+        with reset_modules("celery.backends.cache"):
+            with self.mock_pylibmc():
+                from celery.backends import cache
+                cache._imp = [None]
+                task_id, result = unicode(uuid()), 42
+                b = cache.CacheBackend(backend='memcache')
+                b.store_result(task_id, result, status=states.SUCCESS)
+                self.assertEqual(b.get_result(task_id), result)
+
+    def test_pylibmc_bytes_key(self):
+        with reset_modules("celery.backends.cache"):
+            with self.mock_pylibmc():
+                from celery.backends import cache
+                cache._imp = [None]
+                task_id, result = str_to_bytes(uuid()), 42
+                b = cache.CacheBackend(backend='memcache')
+                b.store_result(task_id, result, status=states.SUCCESS)
+                self.assertEqual(b.get_result(task_id), result)
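
The new test_memcache_key cases pin down a real failure mode: python-memcached rejects unicode keys (the MemcachedClient mock above reproduces its error message), so the cache backend must normalize keys to byte strings before calling set(). A sketch of what the imported str_to_bytes helper is assumed to do (the real one lives in celery.utils.encoding):

    def str_to_bytes(s, encoding="utf-8"):
        # Text becomes a byte string, byte strings pass through.
        if isinstance(s, unicode):  # Python 2; would be `str` on py3k
            return s.encode(encoding)
        return s

    # e.g. client.set(str_to_bytes(key), value) never trips the
    # "Keys must be str()'s, not unicode" error.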

+ 5 - 2
celery/tests/test_backends/test_database.py

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
@@ -43,7 +44,8 @@ class test_DatabaseBackend(unittest.TestCase):
     def test_missing_SQLAlchemy_raises_ImproperlyConfigured(self):
         with mask_modules("sqlalchemy"):
             from celery.backends.database import _sqlalchemy_installed
-            self.assertRaises(ImproperlyConfigured, _sqlalchemy_installed)
+            with self.assertRaises(ImproperlyConfigured):
+                _sqlalchemy_installed()
 
     def test_pickle_hack_for_sqla_05(self):
         import sqlalchemy as sa
@@ -66,7 +68,8 @@ class test_DatabaseBackend(unittest.TestCase):
         conf = app_or_default().conf
         prev, conf.CELERY_RESULT_DBURI = conf.CELERY_RESULT_DBURI, None
         try:
-            self.assertRaises(ImproperlyConfigured, DatabaseBackend)
+            with self.assertRaises(ImproperlyConfigured):
+                DatabaseBackend()
         finally:
             conf.CELERY_RESULT_DBURI = prev
 

+ 2 - 0
celery/tests/test_backends/test_pyredis_compat.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 from nose import SkipTest
 
 from celery.exceptions import ImproperlyConfigured

+ 3 - 3
celery/tests/test_backends/test_redis.py

@@ -3,7 +3,6 @@ from __future__ import with_statement
 
 import sys
 import socket
-from celery.tests.utils import unittest
 
 from nose import SkipTest
 
@@ -13,8 +12,8 @@ from celery import states
 from celery.utils import uuid
 from celery.backends import redis
 from celery.backends.redis import RedisBackend
-
 from celery.tests.utils import mask_modules
+from celery.tests.utils import unittest
 
 _no_redis_msg = "* Redis %s. Will not execute related tests."
 _no_redis_msg_emitted = False
@@ -112,6 +111,7 @@ class TestRedisBackendNoRedis(unittest.TestCase):
         prev = redis.RedisBackend.redis
         redis.RedisBackend.redis = None
         try:
-            self.assertRaises(ImproperlyConfigured, redis.RedisBackend)
+            with self.assertRaises(ImproperlyConfigured):
+                redis.RedisBackend()
         finally:
             redis.RedisBackend.redis = prev

+ 8 - 3
celery/tests/test_backends/test_redis_unit.py

@@ -1,9 +1,14 @@
+from __future__ import absolute_import
+
 from datetime import timedelta
 
 from mock import Mock, patch
 
 from celery import current_app
 from celery import states
+from celery.result import AsyncResult
+from celery.registry import tasks
+from celery.task import subtask
 from celery.utils import cached_property, uuid
 from celery.utils.timeutils import timedelta_seconds
 
@@ -89,7 +94,8 @@ class test_RedisBackend(unittest.TestCase):
         self.assertEqual(b.expires, 60)
 
     def test_on_chord_apply(self):
-        self.Backend().on_chord_apply()
+        self.Backend().on_chord_apply("setid", {},
+                                      result=map(AsyncResult, [1, 2, 3]))
 
     def test_mget(self):
         b = self.MockBackend()
@@ -103,8 +109,6 @@ class test_RedisBackend(unittest.TestCase):
 
     @patch("celery.result.TaskSetResult")
     def test_on_chord_part_return(self, setresult):
-        from celery.registry import tasks
-        from celery.task import subtask
         b = self.MockBackend()
         deps = Mock()
         deps.total = 10
@@ -115,6 +119,7 @@ class test_RedisBackend(unittest.TestCase):
         try:
             tasks["foobarbaz"] = task
             task.request.chord = subtask(task)
+            task.request.taskset = "setid"
 
             b.on_chord_part_return(task)
             self.assertTrue(b.client.incr.call_count)
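
The extra request.taskset assignment is needed because the chord counter key is now derived from the taskset id rather than passed implicitly. Schematically, the synchronization these mocks stand in for looks something like this (a rough sketch of the protocol under test; the names and signature are illustrative, not the backend's actual code):

    def on_chord_part_return(client, setid, total, callback):
        # Every finishing chord member bumps a shared counter; the
        # member that brings it up to `total` fires the callback.
        key = "chord-unlock-%s" % setid
        if client.incr(key) >= total:
            callback()
            client.delete(key)  # clean up the counter key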

A significant number of files changed in this diff, so some files are not shown.