
Merge remote-tracking branch 'celery/master/master' into f0rk/imap

Conflicts:
	celery/concurrency/processes/pool.py
Ryan Kelly, 13 years ago
parent commit f0a4f67373
100 changed files with 2867 additions and 1359 deletions
  1. AUTHORS (+94 -81)
  2. Changelog (+388 -16)
  3. FAQ (+14 -20)
  4. README.rst (+6 -3)
  5. celery/__init__.py (+7 -5)
  6. celery/app/__init__.py (+54 -29)
  7. celery/app/amqp.py (+6 -5)
  8. celery/app/base.py (+27 -32)
  9. celery/app/defaults.py (+48 -19)
  10. celery/app/task/__init__.py (+27 -11)
  11. celery/apps/beat.py (+11 -10)
  12. celery/apps/worker.py (+21 -13)
  13. celery/backends/__init__.py (+14 -15)
  14. celery/backends/amqp.py (+34 -19)
  15. celery/backends/base.py (+65 -35)
  16. celery/backends/cache.py (+28 -8)
  17. celery/backends/cassandra.py (+20 -10)
  18. celery/backends/database.py (+28 -9)
  19. celery/backends/mongodb.py (+16 -9)
  20. celery/backends/pyredis.py (+14 -3)
  21. celery/backends/redis.py (+16 -7)
  22. celery/backends/tyrant.py (+11 -2)
  23. celery/beat.py (+40 -30)
  24. celery/bin/base.py (+50 -12)
  25. celery/bin/camqadm.py (+6 -3)
  26. celery/bin/celerybeat.py (+15 -7)
  27. celery/bin/celeryctl.py (+27 -8)
  28. celery/bin/celeryd.py (+6 -3)
  29. celery/bin/celeryd_detach.py (+7 -7)
  30. celery/bin/celeryd_multi.py (+32 -13)
  31. celery/bin/celeryev.py (+16 -9)
  32. celery/concurrency/__init__.py (+5 -3)
  33. celery/concurrency/base.py (+21 -9)
  34. celery/concurrency/eventlet.py (+6 -3)
  35. celery/concurrency/gevent.py (+6 -2)
  36. celery/concurrency/processes/__init__.py (+11 -6)
  37. celery/concurrency/processes/_win.py (+2 -0)
  38. celery/concurrency/processes/pool.py (+54 -16)
  39. celery/concurrency/solo.py (+3 -1)
  40. celery/concurrency/threads.py (+14 -1)
  41. celery/conf.py (+4 -2)
  42. celery/contrib/abortable.py (+2 -0)
  43. celery/contrib/batches.py (+30 -7)
  44. celery/contrib/rdb.py (+1 -0)
  45. celery/datastructures.py (+112 -53)
  46. celery/db/a805d4bd.py (+2 -0)
  47. celery/db/dfd042c7.py (+2 -0)
  48. celery/db/models.py (+8 -4)
  49. celery/db/session.py (+7 -5)
  50. celery/decorators.py (+7 -2)
  51. celery/events/__init__.py (+16 -4)
  52. celery/events/cursesmon.py (+16 -5)
  53. celery/events/dumper.py (+17 -4)
  54. celery/events/snapshot.py (+23 -8)
  55. celery/events/state.py (+40 -12)
  56. celery/exceptions.py (+29 -0)
  57. celery/execute/__init__.py (+3 -3)
  58. celery/execute/trace.py (+19 -8)
  59. celery/loaders/__init__.py (+19 -10)
  60. celery/loaders/app.py (+11 -1)
  61. celery/loaders/base.py (+35 -14)
  62. celery/loaders/default.py (+15 -4)
  63. celery/local.py (+21 -3)
  64. celery/log.py (+50 -30)
  65. celery/messaging.py (+19 -8)
  66. celery/platforms.py (+177 -23)
  67. celery/registry.py (+11 -2)
  68. celery/result.py (+25 -10)
  69. celery/routes.py (+13 -3)
  70. celery/schedules.py (+75 -62)
  71. celery/signals.py (+8 -8)
  72. celery/states.py (+3 -4)
  73. celery/task/__init__.py (+21 -7)
  74. celery/task/base.py (+16 -5)
  75. celery/task/chords.py (+22 -10)
  76. celery/task/control.py (+20 -7)
  77. celery/task/http.py (+29 -7)
  78. celery/task/schedules.py (+5 -2)
  79. celery/task/sets.py (+28 -13)
  80. celery/tests/__init__.py (+3 -0)
  81. celery/tests/compat.py (+2 -0)
  82. celery/tests/config.py (+2 -0)
  83. celery/tests/functional/case.py (+2 -0)
  84. celery/tests/functional/tasks.py (+2 -0)
  85. celery/tests/test_app/__init__.py (+324 -0)
  86. celery/tests/test_app/test_app.py (+0 -313)
  87. celery/tests/test_app/test_app_amqp.py (+4 -4)
  88. celery/tests/test_app/test_beat.py (+16 -15)
  89. celery/tests/test_app/test_celery.py (+1 -0)
  90. celery/tests/test_app/test_loaders.py (+30 -19)
  91. celery/tests/test_app/test_log.py (+5 -4)
  92. celery/tests/test_app/test_routes.py (+8 -4)
  93. celery/tests/test_backends/__init__.py (+9 -7)
  94. celery/tests/test_backends/test_amqp.py (+71 -64)
  95. celery/tests/test_backends/test_base.py (+46 -40)
  96. celery/tests/test_backends/test_cache.py (+74 -14)
  97. celery/tests/test_backends/test_database.py (+16 -13)
  98. celery/tests/test_backends/test_pyredis_compat.py (+2 -0)
  99. celery/tests/test_backends/test_redis.py (+7 -7)
  100. celery/tests/test_backends/test_redis_unit.py (+12 -11)

AUTHORS (+94 -81)

@@ -1,81 +1,94 @@
-Ordered by date of first contribution:
-  Ask Solem <ask@celeryproject.org>
-  Grégoire Cachet <gregoire@audacy.fr>
-  Vitaly Babiy <vbabiy86@gmail.com>
-  Brian Rosner <brosner@gmail.com>
-  Sean Creeley <sean.creeley@gmail.com>
-  Ben Firshman <ben@firshman.co.uk>
-  Augusto Becciu <augusto@becciu.org>
-  Jonatan Heyman <jonatan@heyman.info>
-  Mark Hellewell <mark.hellewell@gmail.com>
-  Jerzy Kozera <jerzy.kozera@gmail.com>
-  Brad Jasper <bjasper@gmail.com>
-  Wes Winham <winhamwr@gmail.com>
-  Timo Sugliani
-  Michael Elsdoerfer <michael@elsdoerfer.com>
-  Jason Baker <amnorvend@gmail.com>
-  Wes Turner <wes.turner@gmail.com>
-  Maxim Bodyansky <bodyansky@gmail.com>
-  Rune Halvorsen <runefh@gmail.com>
-  Aaron Ross <aaron@wawd.com>
-  Adam Endicott
-  Jesper Noehr <jesper@noehr.org>
-  Mark Stover <stovenator@gmail.com>
-  Andrew Watts <andrewwatts@gmail.com>
-  Felix Berger <bflat1@gmx.net
-  Reza Lotun <rlotun@gmail.com>
-  Mikhail Korobov <kmike84@gmail.com>
-  Jeff Balogh <me@jeffbalogh.org>
-  Patrick Altman <paltman@gmail.com>
-  Vincent Driessen <vincent@datafox.nl>
-  Hari <haridara@gmail.com>
-  Bartosz Ptaszynski
-  Marcin Lulek <info@webreactor.eu>
-  Honza Kral <honza.kral@gmail.com>
-  Jonas Haag <jonas@lophus.org>
-  Armin Ronacher <armin.ronacher@active-4.com>
-  Gunnlaugur Thor Briem <gunnlaugur@gmail.com>
-  Mikhail Gusarov <dottedmag@dottedmag.net>
-  Frédéric Junod <frederic.junod@camptocamp.com>
-  Lukas Linhart <lukas.linhart@centrumholdings.com>
-  Clay Gerrard
-  David Miller <il.livid.dream@gmail.com>
-  Juarez Bochi <jbochi@gmail.com>
-  Noah Kantrowitz <noah@coderanger.net>
-  Gert Van Gool <gertvangool@gmail.com>
-  sdcooke
-  David Cramer <dcramer@gmail.com>
-  Bryan Berg <bryan@mixedmedialabs.com>
-  Piotr Sikora <piotr.sikora@frickle.com>
-  Sam Cooke <sam@mixcloud.com>
-  John Watson <johnw@mahalo.com>
-  Martin Galpin <m@66laps.com>
-  Chris Rose <offby1@offby1.net>
-  Christopher Peplin <peplin@bueda.com>
-  David White <dpwhite2@ncsu.edu>
-  Vladimir Kryachko <vladimir.kryachko@etvnet.com>
-  Simon Josi <simon.josi@atizo.com>
-  jpellerin
-  Norman Richards <orb@nostacktrace.com>
-  Christoph Burgmer <christoph@nwebs.de>
-  Allan Caffee <allan.caffee@gmail.com>
-  Ales Zoulek <ales.zoulek@gmail.com>
-  Roberto Gaiser <gaiser@geekbunker.org>
-  Balachandran C <balachandran.c@gramvaani.org>
-  Kevin Tran <hekevintran@gmail.com>
-  Branko Čibej <brane@apache.org>
-  Jeff Terrace <jterrace@gmail.com>
-  Ryan Petrello <lists@ryanpetrello.com>
-  Marcin Kuźmiński <marcin@python-works.com>
-  Adriano Petrich <petrich@gmail.com>
-  David Strauss <david@davidstrauss.net>
-  David Arthur <mumrah@gmail.com>
-  Miguel Hernandez Martos <enlavin@gmail.com>
-  Jannis Leidel <jannis@leidel.info>
-  Harm Verhagen <harm.verhagen@gmail.com>
-  lookfwd <lookfwd@gmail.com>
-  Mauro Rocco <fireantology@gmail.com>
-  Matthew J Morrison <mattj.morrison@gmail.com>
-  Daniel Watkins <daniel@daniel-watkins.co.uk>
-  Remy Noel <mocramis@gmail.com>
-  Leo Dirac <leo@banyanbranch.com>
+=========
+ AUTHORS
+=========
+:order: sorted
+
+Aaron Ross <aaron@wawd.com>
+Adam Endicott
+Adriano Petrich <petrich@gmail.com>
+Ales Zoulek <ales.zoulek@gmail.com>
+Allan Caffee <allan.caffee@gmail.com>
+Andrew Watts <andrewwatts@gmail.com>
+Armin Ronacher <armin.ronacher@active-4.com>
+Ask Solem <ask@celeryproject.org>
+Augusto Becciu <augusto@becciu.org>
+Balachandran C <balachandran.c@gramvaani.org>
+Bartosz Ptaszynski
+Ben Firshman <ben@firshman.co.uk>
+Brad Jasper <bjasper@gmail.com>
+Branko Čibej <brane@apache.org>
+Brian Rosner <brosner@gmail.com>
+Bryan Berg <bryan@mixedmedialabs.com>
+Chase Seibert <chase.seibert+github@gmail.com>
+Chris Adams <chris@improbable.org>
+Chris Rose <offby1@offby1.net>
+Christoph Burgmer <christoph@nwebs.de>
+Christopher Peplin <peplin@bueda.com>
+Clay Gerrard
+Dan McGee <dan@archlinux.org>
+Daniel Watkins <daniel@daniel-watkins.co.uk>
+David Arthur <mumrah@gmail.com>
+David Cramer <dcramer@gmail.com>
+David Miller <il.livid.dream@gmail.com>
+David Strauss <david@davidstrauss.net>
+David White <dpwhite2@ncsu.edu>
+Felix Berger <bflat1@gmx.net>
+Frédéric Junod <frederic.junod@camptocamp.com>
+Gert Van Gool <gertvangool@gmail.com>
+Greg Haskins <greg@greghaskins.com>
+Grégoire Cachet <gregoire@audacy.fr>
+Gunnlaugur Thor Briem <gunnlaugur@gmail.com>
+Hari <haridara@gmail.com>
+Harm Verhagen <harm.verhagen@gmail.com>
+Honza Kral <honza.kral@gmail.com>
+Jannis Leidel <jannis@leidel.info>
+Jason Baker <amnorvend@gmail.com>
+Jeff Balogh <me@jeffbalogh.org>
+Jeff Terrace <jterrace@gmail.com>
+Jerzy Kozera <jerzy.kozera@gmail.com>
+Jesper Noehr <jesper@noehr.org>
+John Watson <johnw@mahalo.com>
+Jonas Haag <jonas@lophus.org>
+Jonatan Heyman <jonatan@heyman.info>
+Joshua Ginsberg <jag@flowtheory.net>
+Juarez Bochi <jbochi@gmail.com>
+Kevin Tran <hekevintran@gmail.com>
+Kornelijus Survila <kornholijo@gmail.com>
+Leo Dirac <leo@banyanbranch.com>
+Lukas Linhart <lukas.linhart@centrumholdings.com>
+Marcin Kuźmiński <marcin@python-works.com>
+Marcin Lulek <info@webreactor.eu>
+Mark Hellewell <mark.hellewell@gmail.com>
+Mark Stover <stovenator@gmail.com>
+Martin Galpin <m@66laps.com>
+Matthew J Morrison <mattj.morrison@gmail.com>
+Mauro Rocco <fireantology@gmail.com>
+Maxim Bodyansky <bodyansky@gmail.com>
+Michael Elsdoerfer <michael@elsdoerfer.com>
+Miguel Hernandez Martos <enlavin@gmail.com>
+Mikhail Gusarov <dottedmag@dottedmag.net>
+Mikhail Korobov <kmike84@gmail.com>
+Neil Chintomby <neil@mochimedia.com>
+Noah Kantrowitz <noah@coderanger.net>
+Norman Richards <orb@nostacktrace.com>
+Patrick Altman <paltman@gmail.com>
+Piotr Sikora <piotr.sikora@frickle.com>
+Remy Noel <mocramis@gmail.com>
+Reza Lotun <rlotun@gmail.com>
+Roberto Gaiser <gaiser@geekbunker.org>
+Rune Halvorsen <runefh@gmail.com>
+Ryan Petrello <lists@ryanpetrello.com>
+Sam Cooke <sam@mixcloud.com>
+Sean Creeley <sean.creeley@gmail.com>
+Simon Josi <simon.josi@atizo.com>
+Steeve Morin <steeve.morin@gmail.com>
+Stefan Kjartansson <esteban.supreme@gmail.com>
+Timo Sugliani
+Vincent Driessen <vincent@datafox.nl>
+Vitaly Babiy <vbabiy86@gmail.com>
+Vladimir Kryachko <vladimir.kryachko@etvnet.com>
+Wes Turner <wes.turner@gmail.com>
+Wes Winham <winhamwr@gmail.com>
+jpellerin
+lookfwd <lookfwd@gmail.com>
+sdcooke

Changelog (+388 -16)

@@ -5,6 +5,378 @@
 .. contents::
     :local:
 
+.. _version-2.4.0:
+
+2.4.0
+=====
+:release-date: 2011-10-28 04:00 P.M BST
+:status: FROZEN
+:branch: master
+
+.. _v240-important:
+
+Important Notes
+---------------
+
+* Now supports Python 3.
+
+* Fixed deadlock in worker process handling (Issue #496).
+
+    A deadlock could occur after spawning new child processes because
+    the logging library's mutex was not properly reset after fork.
+
+    The symptoms of this bug would be that the worker simply
+    stops processing tasks, as none of the worker's child processes
+    are functioning.  There was a greater chance of this bug occurring
+    with ``maxtasksperchild`` or a time limit enabled.
+
+    This is a workaround for http://bugs.python.org/issue6721#msg140215.
+
+    Fix contributed by Harm Verhagen.
+
+* AMQP Result backend: Now expires results by default.
+
+    The default expiration value is now taken from the
+    :setting:`CELERY_TASK_RESULT_EXPIRES` setting.
+
+    The old :setting:`CELERY_AMQP_TASK_RESULT_EXPIRES` setting has been
+    deprecated and will be removed in version 3.0.
+
+    Note that this means that the result backend requires RabbitMQ 1.1.0 or
+    higher, and that you have to disable expiration if you are running
+    with an older version.  You can do so by disabling the
+    :setting:`CELERY_TASK_RESULT_EXPIRES` setting::
+
+        CELERY_TASK_RESULT_EXPIRES = None
+
+* Eventlet: Fixed problem with shutdown (Issue #457).
+
+* Broker transports can now be specified using URLs.
+
+    The broker can now be specified as a URL instead.
+    This URL must have the format::
+
+        transport://user:password@hostname:port/virtual_host
+
+    for example the default broker is written as::
+
+        amqp://guest:guest@localhost:5672//
+
+    The scheme is required so that the host is identified
+    as a URL and not just a host name.
+    User, password, port and virtual_host are optional, and
+    default to the particular transport's default values.
+
+    .. note::
+
+        Note that the path component (virtual_host) always starts with a
+        forward-slash.  This is necessary to distinguish between the virtual
+        host ``''`` (empty) and ``'/'``, which are both acceptable virtual
+        host names.
+
+        A virtual host of ``'/'`` becomes::
+
+            amqp://guest:guest@localhost:5672//
+
+        and a virtual host of ``''`` (empty) becomes::
+
+            amqp://guest:guest@localhost:5672/
+
+        So the leading slash in the path component is **always required**.
+
+    In addition, the :setting:`BROKER_URL` setting has been added as an alias
+    to ``BROKER_HOST``.  Any broker setting specified in both the URL and in
+    the configuration will be taken from the URL; if a setting is not provided
+    in the URL, the value from the configuration is used as the default.
+
+    Also, programs now support the :option:`-b|--broker` option to specify
+    a broker URL on the command line::
+
+        $ celeryd -b redis://localhost
+
+        $ celeryctl -b amqp://guest:guest@localhost//e
+
+    The environment variable :envvar:`CELERY_BROKER_URL` can also be used to
+    easily override the default broker used.
+
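+    For example, a minimal ``celeryconfig.py`` using the new style could
+    look like this (a sketch; any component left out of the URL falls
+    back to the transport's default)::
+
+        BROKER_URL = "amqp://guest:guest@localhost:5672//"
+
+        # Old-style settings the URL replaces:
+        # BROKER_HOST = "localhost"
+        # BROKER_PORT = 5672
+        # BROKER_USER = "guest"
+        # BROKER_PASSWORD = "guest"
+        # BROKER_VHOST = "/"
+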
+* The deprecated :func:`celery.loaders.setup_loader` function has been removed.
+
+* The :setting:`CELERY_TASK_ERROR_WHITELIST` setting has been replaced
+  by a more flexible approach (Issue #447).
+
+    The error mail sending logic is now available as ``Task.ErrorMail``,
+    with the implementation (for reference) in :mod:`celery.utils.mail`.
+
+    The error mail class can be subclassed to gain complete control
+    of when error messages are sent, thus removing the need for a separate
+    white-list setting.
+
+    The :setting:`CELERY_TASK_ERROR_WHITELIST` setting has been deprecated,
+    and will be removed completely in version 3.0.
+
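+    For example, a sketch of such a subclass, assuming the ``should_send``
+    hook in :mod:`celery.utils.mail` decides whether a message goes out::
+
+        from celery.task import Task
+        from celery.utils.mail import ErrorMail
+
+        class FilteredErrorMail(ErrorMail):
+
+            def should_send(self, context, exc):
+                # Only mail errors we consider actionable.
+                return isinstance(exc, (RuntimeError, OSError))
+
+        class MyTask(Task):
+            abstract = True
+            ErrorMail = FilteredErrorMail
+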
+* Additional Deprecations
+
+    The following functions have been deprecated and are scheduled for removal in
+    version 3.0:
+
+    =====================================  ===================================
+    **Old function**                       **Alternative**
+    =====================================  ===================================
+    `celery.loaders.current_loader`        `celery.current_app.loader`
+    `celery.loaders.load_settings`         `celery.current_app.conf`
+    `celery.execute.apply`                 `Task.apply`
+    `celery.execute.apply_async`           `Task.apply_async`
+    `celery.execute.delay_task`            `celery.execute.send_task`
+    =====================================  ===================================
+
+    The following settings have been deprecated and are scheduled for removal
+    in version 3.0:
+
+    =====================================  ===================================
+    **Old setting**                        **Alternative**
+    =====================================  ===================================
+    `CELERYD_LOG_LEVEL`                    ``celeryd --loglevel=``
+    `CELERYD_LOG_FILE`                     ``celeryd --logfile=``
+    `CELERYBEAT_LOG_LEVEL`                 ``celerybeat --loglevel=``
+    `CELERYBEAT_LOG_FILE`                  ``celerybeat --logfile=``
+    `CELERYMON_LOG_LEVEL`                  ``celerymon --loglevel=``
+    `CELERYMON_LOG_FILE`                   ``celerymon --logfile=``
+    =====================================  ===================================
+
+.. _v240-news:
+
+News
+----
+
+* No longer depends on :mod:`pyparsing`.
+
+* Now depends on Kombu 1.4.3.
+
+* CELERY_IMPORTS can now be a scalar value (Issue #485).
+
+    It is too easy to forget to add the comma after the sole element of a
+    tuple, and this is something that often affects newcomers.
+
+    The docs should probably use a list in examples, as using a tuple
+    for this doesn't even make sense.  Nonetheless, there are many
+    tutorials out there using a tuple, and this change should help
+    new users.
+
+    Suggested by jsaxon-cars.
+
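+    For example, both of these forms now work (a sketch; ``myapp.tasks``
+    is a placeholder module name)::
+
+        CELERY_IMPORTS = ("myapp.tasks", )   # tuple: note the trailing comma
+        CELERY_IMPORTS = "myapp.tasks"       # a single string is now accepted
+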
+* Fixed a memory leak when using the thread pool (Issue #486).
+
+    Contributed by Kornelijus Survila.
+
+* The statedb was not saved at exit.
+
+    This has now been fixed, and it should again remember previously
+    revoked tasks when ``--statedb`` is enabled.
+
+* Adds :setting:`EMAIL_USE_TLS` to enable secure SMTP connections
+  (Issue #418).
+
+    Contributed by Stefan Kjartansson.
+
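+    For example (a sketch; the host and port are placeholders for your
+    SMTP server)::
+
+        EMAIL_HOST = "smtp.example.com"
+        EMAIL_PORT = 587
+        EMAIL_USE_TLS = True
+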
+* Now handles missing fields in task messages as documented in the message
+  format documentation.
+
+    * A missing required field raises :exc:`InvalidTaskError`.
+    * Missing args/kwargs are assumed empty.
+
+    Contributed by Chris Chamberlin.
+
+* Fixed race condition in celery.events.state (celerymon/celeryev)
+  where task info would be removed while iterating over it (Issue #501).
+
+* The Cache, Cassandra, MongoDB, Redis and Tyrant backends now respect
+  the :setting:`CELERY_RESULT_SERIALIZER` setting (Issue #435).
+
+    This means that only the database (django/sqlalchemy) backends
+    currently do not support using custom serializers.
+
+    Contributed by Steeve Morin.
+
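+    For example, to store results as JSON (a sketch)::
+
+        CELERY_RESULT_SERIALIZER = "json"
+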
+* Logging calls no longer manually format messages, but delegate
+  that to the logging system, so tools like Sentry can work with
+  the messages more easily (Issue #445).
+
+    Contributed by Chris Adams.
+
+* ``celeryd_multi`` now supports a ``stop_verify`` command to wait for
+  processes to shut down.
+
+* Cache backend did not work if the cache key was unicode (Issue #504).
+
+    Fix contributed by Neil Chintomby.
+
+* New setting :setting:`CELERY_RESULT_DB_SHORT_LIVED_SESSIONS` added,
+  which, if enabled, will disable the caching of SQLAlchemy sessions
+  (Issue #449).
+
+    Contributed by Leo Dirac.
+
+* All result backends now implement ``__reduce__`` so that they can
+  be pickled (Issue #441).
+
+    Fix contributed by Remy Noel.
+
+* celeryd-multi did not work on Windows (Issue #472).
+
+* New-style ``CELERY_REDIS_*`` settings now take precedence over
+  the old ``REDIS_*`` configuration keys (Issue #508).
+
+    Fix contributed by Joshua Ginsberg.
+
+* Generic celerybeat init script no longer sets `bash -e` (Issue #510).
+
+    Fix contributed by Roger Hu.
+
+* Documented that Chords do not work well with redis-server versions
+  before 2.2.
+
+    Contributed by Dan McGee.
+
+* The :setting:`CELERYBEAT_MAX_LOOP_INTERVAL` setting was not respected.
+
+* ``inspect.registered_tasks`` renamed to ``inspect.registered`` for naming
+  consistency.
+
+    The previous name is still available as an alias.
+
+    Contributed by Mher Movsisyan.
+
+* Worker logged the string representation of args and kwargs
+  without safeguards (Issue #480).
+
+* RHEL init script: Changed celeryd startup priority.
+
+    The default start / stop priorities for MySQL on RHEL are
+
+        # chkconfig: - 64 36
+
+    Therefore, if Celery is using a database as a broker / message store, it
+    should be started after the database is up and running, otherwise errors
+    will ensue. This commit changes the priority in the init script to
+
+        # chkconfig: - 85 15
+
+    which are the default recommended settings for third-party applications,
+    and ensure that Celery is started after the database service and shut
+    down before it terminates.
+
+    Contributed by Yury V. Zaytsev.
+
+* KeyValueStoreBackend.get_many did not respect the ``timeout`` argument
+  (Issue #512).
+
+* celerybeat/celeryev's --workdir option did not chdir before
+  configuration was attempted (Issue #506).
+
+* After deprecating Python 2.4 support we can now name modules correctly,
+  since we can make use of absolute imports.
+
+    Therefore the following internal modules have been renamed::
+
+        celery.concurrency.evlet    -> celery.concurrency.eventlet
+        celery.concurrency.evg      -> celery.concurrency.gevent
+
+* AUTHORS file is now sorted alphabetically.
+
+    Also, as you may have noticed, the contributors of new features/fixes are
+    now mentioned in the Changelog.
+
+.. _version-2.3.3:
+
+2.3.3
+=====
+:release-date: 2011-16-09 05:00 P.M BST
+:by: Mher Movsisyan
+
+* Monkey patching :attr:`sys.stdout` could result in the worker
+  crashing if the replacing object did not define :meth:`isatty`
+  (Issue #477).
+
+* ``CELERYD`` option in :file:`/etc/default/celeryd` should not
+  be used with generic init scripts.
+
+
+.. _version-2.3.2:
+
+2.3.2
+=====
+:release-date: 2011-10-07 05:00 P.M BST
+
+.. _v232-news:
+
+News
+----
+
+* Improved Contributing guide.
+
+    If you'd like to contribute to Celery you should read this
+    guide: http://ask.github.com/celery/contributing.html
+
+    We are looking for contributors at all skill levels, so don't
+    hesitate!
+
+* Now depends on Kombu 1.3.1.
+
+* ``Task.request`` now contains the current worker host name (Issue #460).
+
+    Available as ``task.request.hostname``.
+
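+    For example, using the documented ``task.request`` pattern (a sketch)::
+
+        from celery.task import task
+
+        @task
+        def where_am_i():
+            # The host name of the worker executing this task.
+            return where_am_i.request.hostname
+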
+* It is now easier for app subclasses to extend how they are pickled.
+    (see :class:`celery.app.AppPickler`).
+
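+    A subclass hooking into unpickling might look like this (a sketch;
+    ``restored`` is a made-up attribute)::
+
+        from celery.app import App, AppPickler
+
+        class MyPickler(AppPickler):
+
+            def prepare(self, app, **kwargs):
+                AppPickler.prepare(self, app, **kwargs)
+                app.restored = True   # restore extra state here
+
+        class MyApp(App):
+            Pickler = MyPickler
+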
+.. _v232-fixes:
+
+Fixes
+-----
+
+* `purge/discard_all` was not working correctly (Issue #455).
+
+* The coloring of log messages didn't handle non-ASCII data well
+  (Issue #427).
+
+* [Windows] the multiprocessing pool tried to import ``os.kill``
+  even though this is not available there (Issue #450).
+
+* Fixes a case where the worker could become unresponsive because of tasks
+  exceeding the hard time limit.
+
+* The ``task-sent`` event was missing from the event reference.
+
+* ``ResultSet.iterate`` now returns results as they finish (Issue #459).
+
+    This was not the case previously, even though the documentation
+    stated this was the expected behavior.
+
+* Retries will no longer be performed when tasks are called directly
+  (using ``__call__``).
+
+    Instead, the exception passed to ``retry`` will be re-raised.
+
+* Eventlet no longer crashes if autoscale is enabled.
+
+    Growing and shrinking eventlet pools is still not supported.
+
+* py24 target removed from :file:`tox.ini`.
+
+
+.. _version-2.3.1:
+
+2.3.1
+=====
+:release-date: 2011-08-07 08:00 P.M BST
+
+Fixes
+-----
+
+* The :setting:`CELERY_AMQP_TASK_RESULT_EXPIRES` setting did not work,
+  resulting in an AMQP-related error about not being able to serialize
+  floats while trying to publish task states (Issue #446).
+
 .. _version-2.3.0:
 
 2.3.0
@@ -34,7 +406,7 @@ Important Notes
 
     The default backend is now a dummy backend
     (:class:`celery.backends.base.DisabledBackend`).  Saving state is simply an
-    noop operation, and AsyncResult.wait(), .result, .state, etc will raise
+    noop operation, and AsyncResult.wait(), .result, .state, etc. will raise
     a :exc:`NotImplementedError` telling the user to configure the result backend.
 
     For help choosing a backend please see :ref:`task-result-backends`.
@@ -49,7 +421,7 @@ Important Notes
         For django-celery users the default backend is still ``database``,
         and results are not disabled by default.
 
-* The debian init scripts have been deprecated in favor of the generic-init.d
+* The Debian init scripts have been deprecated in favor of the generic-init.d
   init scripts.
 
     In addition generic init scripts for celerybeat and celeryev has been
@@ -147,7 +519,7 @@ News
     The old names are still supported but pending deprecation.
 
 * PyPy: The default pool implementation used is now multiprocessing
-  if running on pypy 1.5.
+  if running on PyPy 1.5.
 
 * celeryd-multi: now supports "pass through" options.
 
@@ -193,7 +565,7 @@ News
   has been renamed to ``callback`` and ``errback`` and take a single scalar
   value instead of a list.
 
-* No longer propagates errors occuring during process cleanup (Issue #365)
+* No longer propagates errors occurring during process cleanup (Issue #365)
 
 * Added ``TaskSetResult.delete()``, which will delete a previously
   saved taskset result.
@@ -231,7 +603,7 @@ Fixes
 * ``CELERY_TASK_ERROR_WHITE_LIST`` is now properly initialized
   in all loaders.
 
-* celeryd_detach now passes thorugh commandline configuration.
+* celeryd_detach now passes through command-line configuration.
 
 * Remote control command ``add_consumer`` now does nothing if the
   queue is already being consumed from.
@@ -1423,7 +1795,7 @@ News
     :class:`~celery.task.control.inspect`.
 
 
-    Example using celeryctl to start consuming from queue "queue", in 
+    Example using celeryctl to start consuming from queue "queue", in
     exchange "exchange", of type "direct" using binding key "key"::
 
         $ celeryctl inspect add_consumer queue exchange direct key
@@ -3346,7 +3718,7 @@ Fixes
   by rounding to the nearest day/hour.
 
 * Fixed a potential infinite loop in `BaseAsyncResult.__eq__`, although
-  there is no evidence that it has ever been triggered. 
+  there is no evidence that it has ever been triggered.
 
 * celeryd: Now handles messages with encoding problems by acking them and
   emitting an error message.
@@ -3409,7 +3781,7 @@ Fixes
 
 * Execution: `.messaging.TaskPublisher.send_task` now
   incorporates all the functionality apply_async previously did.
-  
+
     Like converting countdowns to eta, so :func:`celery.execute.apply_async` is
     now simply a convenient front-end to
     :meth:`celery.messaging.TaskPublisher.send_task`, using
@@ -3466,7 +3838,7 @@ Fixes
     is revoked even though it's currently being held because its eta is e.g.
     a week into the future.
 
-* The `task_id` argument is now respected even if the task is executed 
+* The `task_id` argument is now respected even if the task is executed
   eagerly (either using apply, or :setting:`CELERY_ALWAYS_EAGER`).
 
 * The internal queues are now cleared if the connection is reset.
@@ -3738,7 +4110,7 @@ News
     ...                   ([8, 8], {}, {"countdown": 3})])
     >>> ts.run()
 
-* Got a 3x performance gain by setting the prefetch count to four times the 
+* Got a 3x performance gain by setting the prefetch count to four times the
   concurrency, (from an average task round-trip of 0.1s to 0.03s!).
 
     A new setting has been added: :setting:`CELERYD_PREFETCH_MULTIPLIER`, which
@@ -3875,7 +4247,7 @@ Documentation
 :release-date: 2009-11-20 03:40 P.M CEST
 
 * QOS Prefetch count was not applied properly, as it was set for every message
-  received (which apparently behaves like, "receive one more"), instead of only 
+  received (which apparently behaves like, "receive one more"), instead of only
   set when our wanted value changed.
 
 .. _version-0.8.1:
@@ -4052,7 +4424,7 @@ Important changes
 
 * Support for multiple AMQP exchanges and queues.
 
-    This feature misses documentation and tests, so anyone interested 
+    This feature misses documentation and tests, so anyone interested
     is encouraged to improve this situation.
 
 * celeryd now survives a restart of the AMQP server!
@@ -4175,7 +4547,7 @@ News
 
 * Functions/methods with a timeout argument now works correctly.
 
-* New: `celery.strategy.even_time_distribution`: 
+* New: `celery.strategy.even_time_distribution`:
     With an iterator yielding task args, kwargs tuples, evenly distribute
     the processing of its tasks throughout the time window available.
 
@@ -4338,7 +4710,7 @@ News
 *  Only use README as long_description if the file exists so easy_install
    doesn't break.
 
-* `celery.view`: JSON responses now properly set its mime-type. 
+* `celery.view`: JSON responses now properly set its mime-type.
 
 * `apply_async` now has a `connection` keyword argument so you
   can re-use the same AMQP connection if you want to execute
@@ -4524,7 +4896,7 @@ arguments, so be sure to flush your task queue before you upgrade.
   version to 0.2. This is a pre-release.
 
 * `celery.task.mark_as_read()` and `celery.task.mark_as_failure()` has
-  been removed. Use `celery.backends.default_backend.mark_as_read()`, 
+  been removed. Use `celery.backends.default_backend.mark_as_read()`,
   and `celery.backends.default_backend.mark_as_failure()` instead.
 
 .. _version-0.1.15:
@@ -4581,7 +4953,7 @@ arguments, so be sure to flush your task queue before you upgrade.
   happened.  It kind of works like the `multiprocessing.AsyncResult`
   class returned by `multiprocessing.Pool.map_async`.
 
-* Added dmap() and dmap_async(). This works like the 
+* Added dmap() and dmap_async(). This works like the
   `multiprocessing.Pool` versions except they are tasks
   distributed to the celery server. Example:
 

FAQ (+14 -20)

@@ -55,12 +55,12 @@ Is Celery dependent on pickle?
 
 **Answer:** No.
 
-Celery can support any serialization scheme and has support for JSON/YAML and
-Pickle by default. And as every task is associated with a content type, you
-can even send one task using pickle, and another using JSON.
+Celery can support any serialization scheme and has built-in support for
+JSON, YAML, Pickle and msgpack. Also, as every task is associated with a
+content type, you can even send one task using pickle, and another using JSON.
 
 The default serialization format is pickle simply because it is
-convenient as it supports sending complex Python objects as task arguments.
+convenient (it supports sending complex Python objects as task arguments).
 
 If you need to communicate with other languages you should change
 to a serialization format that is suitable for that.
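 
 For example, the serializer can be set globally or per call (a sketch,
 where ``add`` stands in for any task)::
 
     CELERY_TASK_SERIALIZER = "json"                     # global default
 
     add.apply_async(args=[2, 2], serializer="pickle")   # per-call override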
@@ -114,7 +114,7 @@ Is Celery multilingual?
 
 **Answer:** Yes.
 
-:mod:`~celery.bin.celeryd` is an implementation of Celery in python. If the
+:mod:`~celery.bin.celeryd` is an implementation of Celery in Python. If the
 language has an AMQP client, there shouldn't be much work to create a worker
 in your language.  A Celery worker is just a program connecting to the broker
 to process messages.
@@ -180,8 +180,8 @@ most systems), it usually contains a message describing the reason.
 
 .. _faq-celeryd-on-freebsd:
 
-Why won't celeryd run on FreeBSD?
----------------------------------
+Does it work on FreeBSD?
+------------------------
 
 **Answer:** The multiprocessing pool requires a working POSIX semaphore
 implementation which isn't enabled in FreeBSD by default. You have to enable
@@ -252,7 +252,7 @@ other error is happening.
 
 .. _faq-periodic-task-does-not-run:
 
-Why won't my Periodic Task run?
+Why won't my periodic task run?
 -------------------------------
 
 **Answer:** See `Why won't my Task run?`_.
@@ -405,6 +405,10 @@ When running with the AMQP result backend, every task result will be sent
 as a message. If you don't collect these results, they will build up and
 RabbitMQ will eventually run out of memory.
 
+Results expire after 1 day by default.  It may be a good idea
+to lower this value by configuring the :setting:`CELERY_TASK_RESULT_EXPIRES`
+setting.
+
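+For example, to keep results for just one hour (a sketch)::
+
+    from datetime import timedelta
+
+    CELERY_TASK_RESULT_EXPIRES = timedelta(hours=1)   # an int (seconds) also works
+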
 If you don't use the results for a task, make sure you set the
 `ignore_result` option:
 
@@ -417,17 +421,6 @@ If you don't use the results for a task, make sure you set the
     class MyTask(Task):
         ignore_result = True
 
-Results can also be disabled globally using the
-:setting:`CELERY_IGNORE_RESULT` setting.
-
-.. note::
-
-    Celery version 2.1 added support for automatic expiration of
-    AMQP result backend results.
-
-    To use this you need to run RabbitMQ 2.1 or higher and enable
-    the :setting:`CELERY_AMQP_TASK_RESULT_EXPIRES` setting.
-
 .. _faq-use-celery-with-stomp:
 
 Can I use Celery with ActiveMQ/STOMP?
@@ -461,7 +454,8 @@ Tasks
 How can I reuse the same connection when applying tasks?
 --------------------------------------------------------
 
-**Answer**: See :ref:`executing-connections`.
+**Answer**: See the :setting:`BROKER_POOL_LIMIT` setting.
+This setting will be enabled by default in 3.0.
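+
+For example (a sketch)::
+
+    BROKER_POOL_LIMIT = 10    # at most 10 cached broker connections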
 
 .. _faq-execute-task-by-name:
 

README.rst (+6 -3)

@@ -4,7 +4,7 @@
 
 .. image:: http://cloud.github.com/downloads/ask/celery/celery_128.png
 
-:Version: 2.3.0
+:Version: 2.4.0rc1
 :Web: http://celeryproject.org/
 :Download: http://pypi.python.org/pypi/celery/
 :Source: http://github.com/ask/celery/
@@ -29,10 +29,11 @@ Celery is used in production systems to process millions of tasks a day.
 Celery is written in Python, but the protocol can be implemented in any
 language.  It can also `operate with other languages using webhooks`_.
 
-The recommended message broker is `RabbitMQ`_, but limited support for
+The recommended message broker is `RabbitMQ`_, but `limited support`_ for
 `Redis`_, `Beanstalk`_, `MongoDB`_, `CouchDB`_ and
 databases (using `SQLAlchemy`_ or the `Django ORM`_) is also available.
 
+
 Celery is easy to integrate with `Django`_, `Pylons`_ and `Flask`_, using
 the `django-celery`_, `celery-pylons`_ and `Flask-Celery`_ add-on packages.
 
@@ -53,6 +54,8 @@ the `django-celery`_, `celery-pylons`_ and `Flask-Celery`_ add-on packages.
 .. _`Flask-Celery`: http://github.com/ask/flask-celery/
 .. _`operate with other languages using webhooks`:
     http://ask.github.com/celery/userguide/remote-tasks.html
+.. _`limited support`:
+    http://kombu.readthedocs.org/en/latest/introduction.html#transport-comparison
 
 .. contents::
     :local:
@@ -253,7 +256,7 @@ Mailing list
 ------------
 
 For discussions about the usage, development, and future of celery,
-please join the `celery-users`_ mailing list. 
+please join the `celery-users`_ mailing list.
 
 .. _`celery-users`: http://groups.google.com/group/celery-users/
 

celery/__init__.py (+7 -5)

@@ -2,10 +2,12 @@
 # :copyright: (c) 2009 - 2011 by Ask Solem.
 # :license:   BSD, see LICENSE for more details.
 
+from __future__ import absolute_import
+
 import os
 import sys
 
-VERSION = (2, 3, 0)
+VERSION = (2, 4, 0, "rc1")
 
 __version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
 __author__ = "Ask Solem"
@@ -20,14 +22,14 @@ if sys.version_info < (2, 5):
 
 
 def Celery(*args, **kwargs):
-    from celery.app import App
+    from .app import App
     return App(*args, **kwargs)
 
 if not os.environ.get("CELERY_NO_EVAL", False):
-    from celery.local import LocalProxy
+    from .local import Proxy
 
     def _get_current_app():
-        from celery.app import current_app
+        from .app import current_app
         return current_app()
 
-    current_app = LocalProxy(_get_current_app)
+    current_app = Proxy(_get_current_app)

celery/app/__init__.py (+54 -29)

@@ -8,17 +8,19 @@ Celery Application.
 :license: BSD, see LICENSE for more details.
 
 """
+
+from __future__ import absolute_import
+
 import os
 import threading
 
 from functools import wraps
 from inspect import getargspec
 
-from kombu.utils import cached_property
+from .. import registry
+from ..utils import cached_property, instantiate
 
-from celery import registry
-from celery.app import base
-from celery.utils import instantiate
+from . import base
 
 # Apps with the :attr:`~celery.app.base.BaseApp.set_as_current` attribute
 # sets this, so it will always contain the last instantiated app,
@@ -27,6 +29,35 @@ _tls = threading.local()
 _tls.current_app = None
 
 
+class AppPickler(object):
+
+    def __call__(self, cls, *args):
+        kwargs = self.build_kwargs(*args)
+        app = self.construct(cls, **kwargs)
+        self.prepare(app, **kwargs)
+        return app
+
+    def prepare(self, app, **kwargs):
+        app.conf.update(kwargs["changes"])
+
+    def build_kwargs(self, *args):
+        return self.build_standard_kwargs(*args)
+
+    def build_standard_kwargs(self, main, changes, loader, backend, amqp,
+            events, log, control, accept_magic_kwargs):
+        return dict(main=main, loader=loader, backend=backend, amqp=amqp,
+                    changes=changes, events=events, log=log, control=control,
+                    set_as_current=False,
+                    accept_magic_kwargs=accept_magic_kwargs)
+
+    def construct(self, cls, **kwargs):
+        return cls(**kwargs)
+
+
+def _unpickle_app(cls, pickler, *args):
+    return pickler()(cls, *args)
+
+
 class App(base.BaseApp):
     """Celery Application.
 
@@ -43,6 +74,7 @@ class App(base.BaseApp):
     :keyword set_as_current:  Make this the global current app.
 
     """
+    Pickler = AppPickler
 
     def set_current(self):
         """Make this the current app for this thread."""
@@ -57,7 +89,7 @@ class App(base.BaseApp):
         taken from this app."""
         conf = self.conf
 
-        from celery.app.task import BaseTask
+        from .task import BaseTask
 
         class Task(BaseTask):
             abstract = True
@@ -89,14 +121,14 @@ class App(base.BaseApp):
 
     def TaskSet(self, *args, **kwargs):
         """Create new :class:`~celery.task.sets.TaskSet`."""
-        from celery.task.sets import TaskSet
+        from ..task.sets import TaskSet
         kwargs["app"] = self
         return TaskSet(*args, **kwargs)
 
     def worker_main(self, argv=None):
         """Run :program:`celeryd` using `argv`.  Uses :data:`sys.argv`
         if `argv` is not specified."""
-        from celery.bin.celeryd import WorkerCommand
+        from ..bin.celeryd import WorkerCommand
         return WorkerCommand(app=self).execute_from_commandline(argv)
 
     def task(self, *args, **options):
@@ -170,26 +202,19 @@ class App(base.BaseApp):
         # Reduce only pickles the configuration changes,
         # so the default configuration doesn't have to be passed
         # between processes.
-        return (_unpickle_app, (self.__class__,
-                                self.main,
-                                self.conf.changes,
-                                self.loader_cls,
-                                self.backend_cls,
-                                self.amqp_cls,
-                                self.events_cls,
-                                self.log_cls,
-                                self.control_cls,
-                                self.accept_magic_kwargs))
-
-
-def _unpickle_app(cls, main, changes, loader, backend, amqp,
-        events, log, control, accept_magic_kwargs):
-    app = cls(main, loader=loader, backend=backend, amqp=amqp,
-                    events=events, log=log, control=control,
-                    set_as_current=False,
-                    accept_magic_kwargs=accept_magic_kwargs)
-    app.conf.update(changes)
-    return app
+        return (_unpickle_app, (self.__class__, self.Pickler)
+                              + self.__reduce_args__())
+
+    def __reduce_args__(self):
+        return (self.main,
+                self.conf.changes,
+                self.loader_cls,
+                self.backend_cls,
+                self.amqp_cls,
+                self.events_cls,
+                self.log_cls,
+                self.control_cls,
+                self.accept_magic_kwargs)
 
 
 #: The "default" loader is the default loader used by old applications.
@@ -222,12 +247,12 @@ def _app_or_default_trace(app=None):  # pragma: no cover
     from multiprocessing import current_process
     if app is None:
         if getattr(_tls, "current_app", None):
-            print("-- RETURNING TO CURRENT APP --")
+            print("-- RETURNING TO CURRENT APP --")  # noqa+
             print_stack()
             return _tls.current_app
         if current_process()._name == "MainProcess":
             raise Exception("DEFAULT APP")
-        print("-- RETURNING TO DEFAULT APP --")
+        print("-- RETURNING TO DEFAULT APP --")      # noqa+
         print_stack()
         return default_app
     return app

celery/app/amqp.py (+6 -5)

@@ -9,16 +9,17 @@ AMQ related functionality.
 :license: BSD, see LICENSE for more details.
 
 """
+from __future__ import absolute_import
+
 from datetime import datetime, timedelta
 
 from kombu import BrokerConnection, Exchange
 from kombu import compat as messaging
 from kombu.pools import ProducerPool
-from kombu.utils import cached_property
 
-from celery import routes as _routes
-from celery import signals
-from celery.utils import gen_unique_id, textindent
+from .. import routes as _routes
+from .. import signals
+from ..utils import cached_property, textindent, uuid
 
 #: List of known options to a Kombu producers send method.
 #: Used to extract the message related options out of any `dict`.
@@ -199,7 +200,7 @@ class TaskPublisher(messaging.Publisher):
                     exchange_type or self.exchange_type, retry, _retry_policy)
             _exchanges_declared.add(exchange)
 
-        task_id = task_id or gen_unique_id()
+        task_id = task_id or uuid()
         task_args = task_args or []
         task_kwargs = task_kwargs or {}
         if not isinstance(task_args, (list, tuple)):

celery/app/base.py (+27 -32)

@@ -11,19 +11,19 @@ Application Base Class.
 from __future__ import absolute_import
 from __future__ import with_statement
 
+import os
 import platform as _platform
-import sys
 
 from contextlib import contextmanager
 from copy import deepcopy
 from functools import wraps
 from threading import Lock
 
-from kombu.utils import cached_property
+from .. import datastructures
+from .. import platforms
+from ..utils import cached_property, instantiate, lpmerge
 
-from celery import datastructures
-from celery.app.defaults import DEFAULTS
-from celery.utils import instantiate, lpmerge
+from .defaults import DEFAULTS, find_deprecated_settings
 
 import kombu
 if kombu.VERSION < (1, 1, 0):
@@ -36,22 +36,8 @@ settings -> transport:%(transport)s results:%(results)s
 """
 
 
-def pyimplementation():
-    if hasattr(_platform, "python_implementation"):
-        return _platform.python_implementation()
-    elif sys.platform.startswith("java"):
-        return "Jython %s" % (sys.platform, )
-    elif hasattr(sys, "pypy_version_info"):
-        v = ".".join(map(str, sys.pypy_version_info[:3]))
-        if sys.pypy_version_info[3:]:
-            v += "-" + "".join(map(str, sys.pypy_version_info[3:]))
-        return "PyPy %s" % (v, )
-    else:
-        return "CPython"
-
-
 class LamportClock(object):
-    """Lamports logical clock.
+    """Lamport's logical clock.
 
     From Wikipedia:
 
@@ -80,7 +66,7 @@ class LamportClock(object):
 
     When sending a message use :meth:`forward` to increment the clock,
     when receiving a message use :meth:`adjust` to sync with
-    the timestamp of the incoming message.
+    the time stamp of the incoming message.
 
     """
     #: The clock's current value.
@@ -120,12 +106,19 @@ class Settings(datastructures.ConfigurationView):
         """Deprecated compat alias to :attr:`BROKER_TRANSPORT`."""
         return self.BROKER_TRANSPORT
 
+    @property
+    def BROKER_HOST(self):
+        return (os.environ.get("CELERY_BROKER_URL") or
+                self.get("BROKER_URL") or
+                self.get("BROKER_HOST"))
+
 
 class BaseApp(object):
     """Base class for apps."""
-    SYSTEM = _platform.system()
-    IS_OSX = SYSTEM == "Darwin"
-    IS_WINDOWS = SYSTEM == "Windows"
+    SYSTEM = platforms.SYSTEM
+    IS_OSX = platforms.IS_OSX
+    IS_WINDOWS = platforms.IS_WINDOWS
 
     amqp_cls = "celery.app.amqp.AMQP"
     backend_cls = None
@@ -138,7 +131,7 @@ class BaseApp(object):
 
     def __init__(self, main=None, loader=None, backend=None,
             amqp=None, events=None, log=None, control=None,
-            set_as_current=True, accept_magic_kwargs=False):
+            set_as_current=True, accept_magic_kwargs=False, **kwargs):
         self.main = main
         self.amqp_cls = amqp or self.amqp_cls
         self.backend_cls = backend or self.backend_cls
@@ -228,13 +221,13 @@ class BaseApp(object):
 
     def AsyncResult(self, task_id, backend=None, task_name=None):
         """Create :class:`celery.result.BaseAsyncResult` instance."""
-        from celery.result import BaseAsyncResult
+        from ..result import BaseAsyncResult
         return BaseAsyncResult(task_id, app=self, task_name=task_name,
                                backend=backend or self.backend)
 
     def TaskSetResult(self, taskset_id, results, **kwargs):
         """Create :class:`celery.result.TaskSetResult` instance."""
-        from celery.result import TaskSetResult
+        from ..result import TaskSetResult
         return TaskSetResult(taskset_id, results, app=self)
 
     def broker_connection(self, hostname=None, userid=None,
@@ -308,6 +301,7 @@ class BaseApp(object):
 
     def prepare_config(self, c):
         """Prepare configuration before it is merged with the defaults."""
+        find_deprecated_settings(c)
         return c
 
     def mail_admins(self, subject, body, fail_silently=False):
@@ -321,7 +315,8 @@ class BaseApp(object):
                                        user=self.conf.EMAIL_HOST_USER,
                                        password=self.conf.EMAIL_HOST_PASSWORD,
                                        timeout=self.conf.EMAIL_TIMEOUT,
-                                       use_ssl=self.conf.EMAIL_USE_SSL)
+                                       use_ssl=self.conf.EMAIL_USE_SSL,
+                                       use_tls=self.conf.EMAIL_USE_TLS)
 
     def either(self, default_key, *values):
         """Fallback to the value of a configuration key if none of the
@@ -337,7 +332,7 @@ class BaseApp(object):
         return lpmerge(l, r)
 
     def _get_backend(self):
-        from celery.backends import get_backend_cls
+        from ..backends import get_backend_cls
         backend_cls = self.backend_cls or self.conf.CELERY_RESULT_BACKEND
         backend_cls = get_backend_cls(backend_cls, loader=self.loader)
         return backend_cls(app=self)
@@ -356,7 +351,7 @@ class BaseApp(object):
         import kombu
         return BUGREPORT_INFO % {"system": _platform.system(),
                                  "arch": _platform.architecture(),
-                                 "py_i": pyimplementation(),
+                                 "py_i": platforms.pyimplementation(),
                                  "celery_v": celery.__version__,
                                  "kombu_v": kombu.__version__,
                                  "py_v": _platform.python_version(),
@@ -382,7 +377,7 @@ class BaseApp(object):
 
     @cached_property
     def backend(self):
-        """Storing/retreiving task state.  See
+        """Storing/retrieving task state.  See
         :class:`~celery.backend.base.BaseBackend`."""
         return self._get_backend()
 
@@ -405,7 +400,7 @@ class BaseApp(object):
     @cached_property
     def loader(self):
         """Current loader."""
-        from celery.loaders import get_loader_cls
+        from ..loaders import get_loader_cls
         return get_loader_cls(self.loader_cls)(app=self)
 
     @cached_property

celery/app/defaults.py (+48 -19)

@@ -1,5 +1,8 @@
+from __future__ import absolute_import
+
 import sys
 
+from collections import deque
 from datetime import timedelta
 
 is_jython = sys.platform.startswith("java")
@@ -32,12 +35,17 @@ def str_to_bool(term, table={"false": False, "no": False, "0": False,
 
 
 class Option(object):
+    alt = None
+    deprecate_by = None
+    remove_by = None
     typemap = dict(string=str, int=int, float=float, any=lambda v: v,
                    bool=str_to_bool, dict=dict, tuple=tuple)
 
     def __init__(self, default=None, *args, **kwargs):
         self.default = default
         self.type = kwargs.get("type") or "string"
+        for attr, value in kwargs.iteritems():
+            setattr(self, attr, value)
 
     def to_python(self, value):
         return self.typemap[self.type](value)
@@ -45,6 +53,7 @@ class Option(object):
 
 NAMESPACES = {
     "BROKER": {
+        "URL": Option(None, type="string"),
         "HOST": Option(None, type="string"),
         "PORT": Option(type="int"),
         "USER": Option(None, type="string"),
@@ -54,7 +63,8 @@ NAMESPACES = {
         "CONNECTION_RETRY": Option(True, type="bool"),
         "CONNECTION_MAX_RETRIES": Option(100, type="int"),
         "POOL_LIMIT": Option(None, type="int"),
-        "INSIST": Option(False, type="bool"),
+        "INSIST": Option(False, type="bool",
+                         deprecate_by="2.4", remove_by="3.0"),
         "USE_SSL": Option(False, type="bool"),
         "TRANSPORT": Option(None, type="string"),
         "TRANSPORT_OPTIONS": Option({}, type="dict"),
@@ -62,8 +72,11 @@ NAMESPACES = {
     "CELERY": {
         "ACKS_LATE": Option(False, type="bool"),
         "ALWAYS_EAGER": Option(False, type="bool"),
-        "AMQP_TASK_RESULT_EXPIRES": Option(type="int"),
-        "AMQP_TASK_RESULT_CONNECTION_MAX": Option(1, type="int"),
+        "AMQP_TASK_RESULT_EXPIRES": Option(type="int",
+                deprecate_by="2.5", remove_by="3.0",
+                alt="CELERY_TASK_RESULT_EXPIRES"),
+        "AMQP_TASK_RESULT_CONNECTION_MAX": Option(1, type="int",
+            remove_by="2.5", alt="BROKER_POOL_LIMIT"),
         "BROADCAST_QUEUE": Option("celeryctl"),
         "BROADCAST_EXCHANGE": Option("celeryctl"),
         "BROADCAST_EXCHANGE_TYPE": Option("fanout"),
@@ -89,6 +102,7 @@ NAMESPACES = {
         "REDIS_DB": Option(None, type="int"),
         "REDIS_PASSWORD": Option(None, type="string"),
         "RESULT_BACKEND": Option(None, type="string"),
+        "RESULT_DB_SHORT_LIVED_SESSIONS": Option(False, type="bool"),
         "RESULT_DBURI": Option(),
         "RESULT_ENGINE_OPTIONS": Option(None, type="dict"),
         "RESULT_EXCHANGE": Option("celeryresults"),
@@ -100,7 +114,8 @@ NAMESPACES = {
         "SEND_TASK_ERROR_EMAILS": Option(False, type="bool"),
         "SEND_TASK_SENT_EVENT": Option(False, type="bool"),
         "STORE_ERRORS_EVEN_IF_IGNORED": Option(False, type="bool"),
-        "TASK_ERROR_WHITELIST": Option((), type="tuple"),
+        "TASK_ERROR_WHITELIST": Option((), type="tuple",
+            deprecate_by="2.5", remove_by="3.0"),
         "TASK_PUBLISH_RETRY": Option(True, type="bool"),
         "TASK_PUBLISH_RETRY_POLICY": Option({
                 "max_retries": 100,
@@ -123,8 +138,9 @@ NAMESPACES = {
         "CONSUMER": Option("celery.worker.consumer.Consumer"),
         "LOG_FORMAT": Option(DEFAULT_PROCESS_LOG_FMT),
         "LOG_COLOR": Option(type="bool"),
-        "LOG_LEVEL": Option("WARN"),
-        "LOG_FILE": Option(),
+        "LOG_LEVEL": Option("WARN", deprecate_by="2.4", remove_by="3.0",
+                            alt="--loglevel argument"),
+        "LOG_FILE": Option(deprecate_by="2.4", remove_by="3.0"),
         "MEDIATOR": Option("celery.worker.mediator.Mediator"),
         "MAX_TASKS_PER_CHILD": Option(type="int"),
         "POOL": Option(DEFAULT_POOL),
@@ -140,12 +156,12 @@ NAMESPACES = {
         "SCHEDULER": Option("celery.beat.PersistentScheduler"),
         "SCHEDULE_FILENAME": Option("celerybeat-schedule"),
         "MAX_LOOP_INTERVAL": Option(5 * 60, type="int"),
-        "LOG_LEVEL": Option("INFO"),
-        "LOG_FILE": Option(),
+        "LOG_LEVEL": Option("INFO", deprecate_by="2.4", remove_by="3.0"),
+        "LOG_FILE": Option(deprecate_by="2.4", remove_by="3.0"),
     },
     "CELERYMON": {
-        "LOG_LEVEL": Option("INFO"),
-        "LOG_FILE": Option(),
+        "LOG_LEVEL": Option("INFO", deprecate_by="2.4", remove_by="3.0"),
+        "LOG_FILE": Option(deprecate_by="2.4", remove_by="3.0"),
         "LOG_FORMAT": Option(DEFAULT_LOG_FMT),
     },
     "EMAIL": {
@@ -155,6 +171,7 @@ NAMESPACES = {
         "HOST_PASSWORD": Option(None),
         "TIMEOUT": Option(2, type="int"),
         "USE_SSL": Option(False, type="bool"),
+        "USE_TLS": Option(False, type="bool"),
     },
     "SERVER_EMAIL": Option("celery@localhost"),
     "ADMINS": Option((), type="tuple"),
@@ -165,13 +182,25 @@ NAMESPACES = {
 }
 
 
-def _flatten(d, ns=""):
-    acc = []
-    for key, value in d.iteritems():
-        if isinstance(value, dict):
-            acc.extend(_flatten(value, ns=key + '_'))
-        else:
-            acc.append((ns + key, value.default))
-    return acc
+def flatten(d, ns=""):
+    stack = deque([(ns, d)])
+    while stack:
+        name, space = stack.popleft()
+        for key, value in space.iteritems():
+            if isinstance(value, dict):
+                stack.append((name + key + '_', value))
+            else:
+                yield name + key, value
+
+
+def find_deprecated_settings(source):
+    from celery.utils import warn_deprecated
+    for name, opt in flatten(NAMESPACES):
+        if (opt.deprecate_by or opt.remove_by) and getattr(source, name, None):
+            warn_deprecated(description="The %r setting" % (name, ),
+                            deprecation=opt.deprecate_by,
+                            removal=opt.remove_by,
+                            alternative=opt.alt)
+
 
-DEFAULTS = dict(_flatten(NAMESPACES))
+DEFAULTS = dict((key, value.default) for key, value in flatten(NAMESPACES))

celery/app/task/__init__.py (+27 -11)

@@ -1,13 +1,16 @@
 # -*- coding: utf-8 -*-"
+from __future__ import absolute_import
+
 import sys
 import threading
 
-from celery.datastructures import ExceptionInfo
-from celery.exceptions import MaxRetriesExceededError, RetryTaskError
-from celery.execute.trace import TaskTrace
-from celery.registry import tasks, _unpickle_task
-from celery.result import EagerResult
-from celery.utils import mattrgetter, gen_unique_id, fun_takes_kwargs
+from ...datastructures import ExceptionInfo
+from ...exceptions import MaxRetriesExceededError, RetryTaskError
+from ...execute.trace import TaskTrace
+from ...registry import tasks, _unpickle_task
+from ...result import EagerResult
+from ...utils import fun_takes_kwargs, mattrgetter, uuid
+from ...utils.mail import ErrorMail
 
 extract_exec_options = mattrgetter("queue", "routing_key",
                                    "exchange", "immediate",
@@ -20,6 +23,7 @@ class Context(threading.local):
     # Default context
     logfile = None
     loglevel = None
+    hostname = None
     id = None
     args = None
     kwargs = None
@@ -28,6 +32,7 @@ class Context(threading.local):
     delivery_info = None
     taskset = None
     chord = None
+    called_directly = True
 
     def update(self, d, **kwargs):
         self.__dict__.update(d, **kwargs)
@@ -43,7 +48,7 @@ class Context(threading.local):
 
 
 class TaskType(type):
-    """Metaclass for tasks.
+    """Meta class for tasks.
 
     Automatically registers the task in the task registry, except
     if the `abstract` attribute is set.
@@ -99,6 +104,7 @@ class BaseTask(object):
     """
     __metaclass__ = TaskType
 
+    ErrorMail = ErrorMail
     MaxRetriesExceededError = MaxRetriesExceededError
 
     #: The application instance associated with this task class.
@@ -216,7 +222,7 @@ class BaseTask(object):
     #: worker crashes mid execution (which may be acceptable for some
     #: applications).
     #:
-    #: The application default can be overriden with the
+    #: The application default can be overridden with the
     #: :setting:`CELERY_ACKS_LATE` setting.
     acks_late = False
 
@@ -374,7 +380,7 @@ class BaseTask(object):
         :keyword exchange: The named exchange to send the task to.
                            Defaults to the :attr:`exchange` attribute.
 
-        :keyword exchange_type: The exchange type to initalize the exchange
+        :keyword exchange_type: The exchange type to initialize the exchange
                                 if not already declared.  Defaults to the
                                 :attr:`exchange_type` attribute.
 
@@ -495,6 +501,11 @@ class BaseTask(object):
         kwargs = request.kwargs if kwargs is None else kwargs
         delivery_info = request.delivery_info
 
+        # Not in a worker, or emulated by apply/always_eager;
+        # so just raise the original exception.
+        if request.called_directly:
+            raise exc or RetryTaskError("Task can be retried", None)
+
         if delivery_info:
             options.setdefault("exchange", delivery_info.get("exchange"))
             options.setdefault("routing_key", delivery_info.get("routing_key"))
@@ -538,7 +549,7 @@ class BaseTask(object):
         """
         args = args or []
         kwargs = kwargs or {}
-        task_id = options.get("task_id") or gen_unique_id()
+        task_id = options.get("task_id") or uuid()
         retries = options.get("retries", 0)
         throw = self.app.either("CELERY_EAGER_PROPAGATES_EXCEPTIONS",
                                 options.pop("throw", None))
@@ -652,6 +663,11 @@ class BaseTask(object):
         """
         pass
 
+    def send_error_email(self, context, exc, **kwargs):
+        if self.send_error_emails and not self.disable_error_emails:
+            sender = self.ErrorMail(self, **kwargs)
+            sender.send(context, exc)
+
     def on_success(self, retval, task_id, args, kwargs):
         """Success handler.
 
@@ -689,7 +705,7 @@ class BaseTask(object):
         """Returns :class:`~celery.task.sets.subtask` object for
         this task, wrapping arguments and execution options
         for a single task invocation."""
-        from celery.task.sets import subtask
+        from ...task.sets import subtask
         return subtask(cls, *args, **kwargs)
 
     @property
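
The `called_directly` flag introduced above changes what `retry()` does outside of a worker: instead of re-queueing, it re-raises. A minimal sketch of the effect, assuming a trivial registered task (`fetch` and the `download` helper are hypothetical):

    from celery.task import task

    @task
    def fetch(url):
        try:
            return download(url)   # hypothetical helper
        except IOError, exc:
            # In a worker this schedules a retry; when the task is
            # called directly (fetch("http://e.com") or with
            # CELERY_ALWAYS_EAGER), request.called_directly is True
            # and retry() raises `exc` instead.
            fetch.retry(exc=exc)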

+ 11 - 10
celery/apps/beat.py

@@ -1,14 +1,15 @@
+from __future__ import absolute_import
+
 import atexit
 import socket
 import sys
 import traceback
 
-from celery import __version__
-from celery import beat
-from celery import platforms
-from celery.app import app_or_default
-from celery.utils import get_full_cls_name, LOG_LEVELS
-from celery.utils.timeutils import humanize_seconds
+from .. import __version__, platforms
+from .. import beat
+from ..app import app_or_default
+from ..utils import get_full_cls_name, LOG_LEVELS
+from ..utils.timeutils import humanize_seconds
 
 STARTUP_INFO_FMT = """
 Configuration ->
@@ -80,15 +81,15 @@ class Beat(object):
                   c.blue("        _\n"),
                   c.reset(self.startup_info(beat)))))
         if self.socket_timeout:
-            logger.debug("Setting default socket timeout to %r" % (
-                self.socket_timeout))
+            logger.debug("Setting default socket timeout to %r",
+                         self.socket_timeout)
             socket.setdefaulttimeout(self.socket_timeout)
         try:
             self.install_sync_handler(beat)
             beat.start()
         except Exception, exc:
-            logger.critical("celerybeat raised exception %s: %r\n%s" % (
-                            exc.__class__, exc, traceback.format_exc()),
+            logger.critical("celerybeat raised exception %s: %r\n%s",
+                            exc.__class__, exc, traceback.format_exc(),
                             exc_info=sys.exc_info())
 
     def init_loader(self):

+ 21 - 13
celery/apps/worker.py

@@ -11,13 +11,18 @@ import socket
 import sys
 import warnings
 
-from celery import __version__
-from celery import platforms
-from celery import signals
-from celery.app import app_or_default
-from celery.exceptions import ImproperlyConfigured, SystemTerminate
-from celery.utils import get_full_cls_name, LOG_LEVELS, cry
-from celery.worker import WorkController
+from .. import __version__, platforms, signals
+from ..app import app_or_default
+from ..exceptions import ImproperlyConfigured, SystemTerminate
+from ..utils import get_full_cls_name, isatty, LOG_LEVELS, cry
+from ..worker import WorkController
+
+try:
+    from greenlet import GreenletExit
+    IGNORE_ERRORS = (GreenletExit, )
+except ImportError:
+    IGNORE_ERRORS = ()
+
 
 BANNER = """
  -------------- celery@%(hostname)s v%(version)s
@@ -100,7 +105,7 @@ class Worker(object):
         if autoscale:
             max_c, _, min_c = autoscale.partition(",")
             self.autoscale = [int(max_c), min_c and int(min_c) or 0]
-        self._isatty = sys.stdout.isatty()
+        self._isatty = isatty(sys.stdout)
 
         self.colored = app.log.colored(self.logfile)
 
@@ -137,7 +142,10 @@ class Worker(object):
               str(self.colored.reset(self.extra_info())))
         self.set_process_status("-active-")
 
-        self.run_worker()
+        try:
+            self.run_worker()
+        except IGNORE_ERRORS:
+            pass
 
     def on_consumer_ready(self, consumer):
         signals.worker_ready.send(sender=consumer)
@@ -183,7 +191,7 @@ class Worker(object):
         self.loader.init_worker()
 
     def tasklist(self, include_builtins=True):
-        from celery.registry import tasks
+        from ..registry import tasks
         tasklist = tasks.keys()
         if not include_builtins:
             tasklist = filter(lambda s: not s.startswith("celery."),
@@ -339,14 +347,14 @@ def install_cry_handler(logger):
         platforms.signals["SIGUSR1"] = cry_handler
 
 
-def install_rdb_handler():  # pragma: no cover
+def install_rdb_handler(envvar="CELERY_RDBSIG"):  # pragma: no cover
 
     def rdb_handler(signum, frame):
         """Signal handler setting a rdb breakpoint at the current frame."""
-        from celery.contrib import rdb
+        from ..contrib import rdb
         rdb.set_trace(frame)
 
-    if os.environ.get("CELERY_RDBSIG"):
+    if os.environ.get(envvar):
         platforms.signals["SIGUSR2"] = rdb_handler
 
 

+ 14 - 15
celery/backends/__init__.py

@@ -1,6 +1,9 @@
-from celery import current_app
-from celery.local import LocalProxy
-from celery.utils import get_cls_by_name
+from __future__ import absolute_import
+
+from .. import current_app
+from ..local import Proxy
+from ..utils import get_cls_by_name
+from ..utils.functional import memoize
 
 BACKEND_ALIASES = {
     "amqp": "celery.backends.amqp.AMQPBackend",
@@ -13,23 +16,19 @@ BACKEND_ALIASES = {
     "disabled": "celery.backends.base.DisabledBackend",
 }
 
-_backend_cache = {}
-
 
+@memoize(100)
 def get_backend_cls(backend=None, loader=None):
     """Get backend class by name/alias"""
     backend = backend or "disabled"
     loader = loader or current_app.loader
-    if backend not in _backend_cache:
-        aliases = dict(BACKEND_ALIASES, **loader.override_backends)
-        try:
-            _backend_cache[backend] = get_cls_by_name(backend, aliases)
-        except ValueError, exc:
-            raise ValueError("Unknown result backend: %r.  "
-                             "Did you spell it correctly?  (%s)" % (backend,
-                                                                    exc))
-    return _backend_cache[backend]
+    aliases = dict(BACKEND_ALIASES, **loader.override_backends)
+    try:
+        return get_cls_by_name(backend, aliases)
+    except ValueError, exc:
+        raise ValueError("Unknown result backend: %r.  "
+                         "Did you spell it correctly?  (%s)" % (backend, exc))
 
 
 # deprecate this.
-default_backend = LocalProxy(lambda: current_app.backend)
+default_backend = Proxy(lambda: current_app.backend)
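
The `memoize` decorator replacing the hand-rolled `_backend_cache` dict above keeps a bounded cache of return values. Roughly, for positional arguments only and using the `LRUCache` from `celery.datastructures` (a sketch; the real helper lives in `celery.utils.functional` and may differ):

    from celery.datastructures import LRUCache

    def memoize(maxsize=None, Cache=LRUCache):

        def _decorator(fun):
            cache = Cache(limit=maxsize)

            def _memoized(*args):
                try:
                    return cache[args]
                except KeyError:
                    # compute once, remember, evicting LRU entries
                    # when more than `maxsize` keys are cached.
                    value = cache[args] = fun(*args)
                    return value
            return _memoized
        return _decorator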

+ 34 - 19
celery/backends/amqp.py

@@ -11,9 +11,10 @@ from itertools import count
 from kombu.entity import Exchange, Queue
 from kombu.messaging import Consumer, Producer
 
-from celery import states
-from celery.backends.base import BaseDictBackend
-from celery.exceptions import TimeoutError
+from .. import states
+from ..exceptions import TimeoutError
+
+from .base import BaseDictBackend
 
 
 class BacklogLimitExceeded(Exception):
@@ -38,7 +39,7 @@ class AMQPBackend(BaseDictBackend):
 
     def __init__(self, connection=None, exchange=None, exchange_type=None,
             persistent=None, serializer=None, auto_delete=True,
-            expires=None, connection_max=None, **kwargs):
+            **kwargs):
         super(AMQPBackend, self).__init__(**kwargs)
         conf = self.app.conf
         self._connection = connection
@@ -55,14 +56,20 @@ class AMQPBackend(BaseDictBackend):
                                       auto_delete=auto_delete)
         self.serializer = serializer or conf.CELERY_RESULT_SERIALIZER
         self.auto_delete = auto_delete
-        self.expires = (conf.CELERY_AMQP_TASK_RESULT_EXPIRES if expires is None
-                                                             else expires)
-        if self.expires is not None:
-            self.expires = self.prepare_expires(self.expires)
-            # x-expires requires RabbitMQ 2.1.0 or higher.
-            self.queue_arguments["x-expires"] = self.expires * 1000.0
-        self.connection_max = (connection_max or
-                               conf.CELERY_AMQP_TASK_RESULT_CONNECTION_MAX)
+
+        # AMQP_TASK_RESULT_EXPIRES setting is deprecated and will be
+        # removed in version 3.0.
+        dexpires = conf.CELERY_AMQP_TASK_RESULT_EXPIRES
+
+        self.expires = None
+        if "expires" in kwargs:
+            if kwargs["expires"] is not None:
+                self.expires = self.prepare_expires(kwargs["expires"])
+        else:
+            self.expires = self.prepare_expires(dexpires)
+
+        if self.expires:
+            self.queue_arguments["x-expires"] = int(self.expires * 1000)
         self.mutex = threading.Lock()
 
     def _create_binding(self, task_id):
@@ -85,12 +92,10 @@ class AMQPBackend(BaseDictBackend):
 
     def _publish_result(self, connection, task_id, meta):
         # cache single channel
-        if hasattr(connection, "_result_producer_chan") and \
-                connection._result_producer_chan is not None and \
-                connection._result_producer_chan.connection is not None:
-            channel = connection._result_producer_chan
-        else:
-            channel = connection._result_producer_chan = connection.channel()
+        if connection._default_channel is not None and \
+                connection._default_channel.connection is None:
+            connection.maybe_close_channel(connection._default_channel)
+        channel = connection.default_channel
 
         self._create_producer(task_id, channel).publish(meta)
 
@@ -105,7 +110,6 @@ class AMQPBackend(BaseDictBackend):
             with self.app.pool.acquire(block=True) as conn:
 
                 def errback(error, delay):
-                    conn._result_producer_chan = None
                     print("Couldn't send result for %r: %r. Retry in %rs." % (
                             task_id, error, delay))
 
@@ -234,3 +238,14 @@ class AMQPBackend(BaseDictBackend):
     def delete_taskset(self, taskset_id):
         raise NotImplementedError(
                 "delete_taskset is not supported by this backend.")
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(connection=self._connection,
+                 exchange=self.exchange.name,
+                 exchange_type=self.exchange.type,
+                 persistent=self.persistent,
+                 serializer=self.serializer,
+                 auto_delete=self.auto_delete,
+                 expires=self.expires))
+        return super(AMQPBackend, self).__reduce__(args, kwargs)
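
Note that `x-expires` is given in milliseconds, hence the `int(self.expires * 1000)` above. A sketch of the kind of result-queue declaration this produces (queue and routing key names hypothetical; `x-expires` requires RabbitMQ 2.1.0 or later):

    from kombu.entity import Exchange, Queue

    # hypothetical result queue that the broker deletes after one
    # day of disuse:
    binding = Queue("celeryres.some-task-id",
                    Exchange("celeryresults", type="direct"),
                    routing_key="some-task-id", auto_delete=True,
                    queue_arguments={"x-expires": 86400 * 1000})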

+ 65 - 35
celery/backends/base.py

@@ -1,16 +1,29 @@
 """celery.backends.base"""
+from __future__ import absolute_import
+
 import time
 import sys
 
 from datetime import timedelta
 
-from celery import states
-from celery.exceptions import TimeoutError, TaskRevokedError
-from celery.utils import timeutils
-from celery.utils.serialization import pickle, get_pickled_exception
-from celery.utils.serialization import get_pickleable_exception
-from celery.utils.serialization import create_exception_cls
-from celery.datastructures import LocalCache
+from kombu import serialization
+
+from .. import states
+from ..datastructures import LRUCache
+from ..exceptions import TimeoutError, TaskRevokedError
+from ..utils import timeutils
+from ..utils.encoding import from_utf8
+from ..utils.serialization import (get_pickled_exception,
+                                   get_pickleable_exception,
+                                   create_exception_cls)
+
+EXCEPTION_ABLE_CODECS = frozenset(["pickle", "yaml"])
+is_py3k = sys.version_info >= (3, 0)
+
+
+def unpickle_backend(cls, args, kwargs):
+    """Returns an unpickled backend."""
+    return cls(*args, **kwargs)
 
 
 class BaseBackend(object):
@@ -21,9 +34,29 @@ class BaseBackend(object):
 
     TimeoutError = TimeoutError
 
+    #: Time to sleep between polling each individual item
+    #: in `ResultSet.iterate`, as opposed to the `interval`
+    #: argument which is for each pass.
+    subpolling_interval = None
+
     def __init__(self, *args, **kwargs):
-        from celery.app import app_or_default
+        from ..app import app_or_default
         self.app = app_or_default(kwargs.get("app"))
+        self.serializer = kwargs.get("serializer",
+                                     self.app.conf.CELERY_RESULT_SERIALIZER)
+        (self.content_type,
+         self.content_encoding,
+         self.encoder) = serialization.registry._encoders[self.serializer]
+
+    def encode(self, data):
+        _, _, payload = serialization.encode(data, serializer=self.serializer)
+        return payload
+
+    def decode(self, payload):
+        payload = is_py3k and payload or str(payload)
+        return serialization.decode(payload,
+                                    content_type=self.content_type,
+                                    content_encoding=self.content_encoding)
 
     def prepare_expires(self, value, type=None):
         if value is None:
@@ -70,18 +103,15 @@ class BaseBackend(object):
 
     def prepare_exception(self, exc):
         """Prepare exception for serialization."""
-        if (self.app.conf["CELERY_RESULT_SERIALIZER"] in ("pickle", "yaml")):
+        if self.serializer in EXCEPTION_ABLE_CODECS:
             return get_pickleable_exception(exc)
-        return {
-            "exc_type": type(exc).__name__,
-            "exc_message": str(exc),
-        }
+        return {"exc_type": type(exc).__name__, "exc_message": str(exc)}
 
     def exception_to_python(self, exc):
         """Convert serialized exception to Python exception."""
-        if (self.app.conf["CELERY_RESULT_SERIALIZER"] in ("pickle", "yaml")):
+        if self.serializer in EXCEPTION_ABLE_CODECS:
             return get_pickled_exception(exc)
-        return create_exception_cls(exc["exc_type"].encode("utf-8"),
+        return create_exception_cls(from_utf8(exc["exc_type"]),
                                     sys.modules[__name__])
 
     def prepare_value(self, result):
@@ -173,19 +203,19 @@ class BaseBackend(object):
         pass
 
     def on_chord_apply(self, setid, body, *args, **kwargs):
-        from celery.registry import tasks
+        from ..registry import tasks
         tasks["celery.chord_unlock"].apply_async((setid, body, ), kwargs,
                                                  countdown=1)
 
-    def __reduce__(self):
-        return (self.__class__, ())
+    def __reduce__(self, args=(), kwargs={}):
+        return (unpickle_backend, (self.__class__, args, kwargs))
 
 
 class BaseDictBackend(BaseBackend):
 
     def __init__(self, *args, **kwargs):
         super(BaseDictBackend, self).__init__(*args, **kwargs)
-        self._cache = LocalCache(limit=kwargs.get("max_cached_results") or
+        self._cache = LRUCache(limit=kwargs.get("max_cached_results") or
                                  self.app.conf.CELERY_MAX_CACHED_RESULTS)
 
     def store_result(self, task_id, result, status, traceback=None, **kwargs):
@@ -218,11 +248,11 @@ class BaseDictBackend(BaseBackend):
             return meta["result"]
 
     def get_task_meta(self, task_id, cache=True):
-        if cache and task_id in self._cache:
+        if cache:
             try:
                 return self._cache[task_id]
             except KeyError:
-                pass   # backend emptied in the meantime
+                pass
 
         meta = self._get_task_meta_for(task_id)
         if cache and meta.get("status") == states.SUCCESS:
@@ -237,11 +267,11 @@ class BaseDictBackend(BaseBackend):
                                                         cache=False)
 
     def get_taskset_meta(self, taskset_id, cache=True):
-        if cache and taskset_id in self._cache:
+        if cache:
             try:
                 return self._cache[taskset_id]
             except KeyError:
-                pass  # backend emptied in the meantime
+                pass
 
         meta = self._restore_taskset(taskset_id)
         if cache and meta is not None:
@@ -296,12 +326,12 @@ class KeyValueStoreBackend(BaseDictBackend):
     def _mget_to_results(self, values, keys):
         if hasattr(values, "items"):
             # client returns dict so mapping preserved.
-            return dict((self._strip_prefix(k), pickle.loads(str(v)))
+            return dict((self._strip_prefix(k), self.decode(v))
                             for k, v in values.iteritems()
                                 if v is not None)
         else:
             # client returns list so need to recreate mapping.
-            return dict((keys[i], pickle.loads(str(value)))
+            return dict((keys[i], self.decode(value))
                             for i, value in enumerate(values)
                                 if value is not None)
 
@@ -319,6 +349,7 @@ class KeyValueStoreBackend(BaseDictBackend):
                     cached_ids.add(task_id)
 
         ids ^= cached_ids
+        iterations = 0
         while ids:
             keys = list(ids)
             r = self._mget_to_results(self.mget([self.get_key_for_task(k)
@@ -327,19 +358,22 @@ class KeyValueStoreBackend(BaseDictBackend):
             ids ^= set(r.keys())
             for key, value in r.iteritems():
                 yield key, value
+            if timeout and iterations * interval >= timeout:
+                raise TimeoutError("Operation timed out (%s)" % (timeout, ))
             time.sleep(interval)  # don't busy loop.
+            iterations += 1
 
     def _forget(self, task_id):
         self.delete(self.get_key_for_task(task_id))
 
     def _store_result(self, task_id, result, status, traceback=None):
         meta = {"status": status, "result": result, "traceback": traceback}
-        self.set(self.get_key_for_task(task_id), pickle.dumps(meta))
+        self.set(self.get_key_for_task(task_id), self.encode(meta))
         return result
 
     def _save_taskset(self, taskset_id, result):
         self.set(self.get_key_for_taskset(taskset_id),
-                 pickle.dumps({"result": result}))
+                 self.encode({"result": result}))
         return result
 
     def _delete_taskset(self, taskset_id):
@@ -350,17 +384,17 @@ class KeyValueStoreBackend(BaseDictBackend):
         meta = self.get(self.get_key_for_task(task_id))
         if not meta:
             return {"status": states.PENDING, "result": None}
-        return pickle.loads(str(meta))
+        return self.decode(meta)
 
     def _restore_taskset(self, taskset_id):
         """Get task metadata for a task by id."""
         meta = self.get(self.get_key_for_taskset(taskset_id))
         if meta:
-            meta = pickle.loads(str(meta))
-            return meta
+            return self.decode(meta)
 
 
 class DisabledBackend(BaseBackend):
+    _cache = {}   # need this attribute to reset cache in tests.
 
     def store_result(self, *args, **kwargs):
         pass
@@ -368,8 +402,4 @@ class DisabledBackend(BaseBackend):
     def _is_disabled(self, *args, **kwargs):
         raise NotImplementedError("No result backend configured.  "
                 "Please see the documentation for more information.")
-
-    wait_for = _is_disabled
-    get_status = _is_disabled
-    get_result = _is_disabled
-    get_traceback = _is_disabled
+    wait_for = get_status = get_result = get_traceback = _is_disabled
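
The `__reduce__`/`unpickle_backend` pair above gives subclasses a single hook for surviving pickling: each level adds its own constructor kwargs before delegating, as the backend modules below all do. A sketch with a hypothetical subclass (assumes the class is importable, as pickle requires):

    import pickle

    class URLBackend(BaseDictBackend):      # hypothetical

        def __init__(self, url=None, **kwargs):
            super(URLBackend, self).__init__(**kwargs)
            self.url = url

        def __reduce__(self, args=(), kwargs={}):
            # contribute this level's kwargs, then delegate up.
            kwargs.update(dict(url=self.url))
            return super(URLBackend, self).__reduce__(args, kwargs)

    copy = pickle.loads(pickle.dumps(URLBackend(url="x://y")))
    assert copy.url == "x://y"   # kwargs survive the round-trip.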

+ 28 - 8
celery/backends/cache.py

@@ -1,8 +1,11 @@
-from kombu.utils import cached_property
+from __future__ import absolute_import
 
-from celery.backends.base import KeyValueStoreBackend
-from celery.exceptions import ImproperlyConfigured
-from celery.datastructures import LocalCache
+from ..datastructures import LRUCache
+from ..exceptions import ImproperlyConfigured
+from ..utils import cached_property
+from ..utils.encoding import ensure_bytes
+
+from .base import KeyValueStoreBackend
 
 _imp = [None]
 
@@ -36,7 +39,7 @@ def get_best_memcache(*args, **kwargs):
 class DummyClient(object):
 
     def __init__(self, *args, **kwargs):
-        self.cache = LocalCache(5000)
+        self.cache = LRUCache(limit=5000)
 
     def get(self, key, *args, **kwargs):
         return self.cache.get(key)
@@ -59,6 +62,7 @@ backends = {"memcache": lambda: get_best_memcache,
 
 
 class CacheBackend(KeyValueStoreBackend):
+    servers = None
 
     def __init__(self, expires=None, backend=None, options={}, **kwargs):
         super(CacheBackend, self).__init__(self, **kwargs)
@@ -66,10 +70,11 @@ class CacheBackend(KeyValueStoreBackend):
         self.options = dict(self.app.conf.CELERY_CACHE_BACKEND_OPTIONS,
                             **options)
 
-        backend = backend or self.app.conf.CELERY_CACHE_BACKEND
-        self.backend, _, servers = backend.partition("://")
+        self.backend = backend or self.app.conf.CELERY_CACHE_BACKEND
+        if self.backend:
+            self.backend, _, servers = self.backend.partition("://")
+            self.servers = servers.rstrip('/').split(";")
         self.expires = self.prepare_expires(expires, type=int)
-        self.servers = servers.rstrip('/').split(";")
         try:
             self.Client = backends[self.backend]()
         except KeyError:
@@ -78,6 +83,12 @@ class CacheBackend(KeyValueStoreBackend):
                     "following backends: %s" % (self.backend,
                                                 ", ".join(backends.keys())))
 
+    def get_key_for_task(self, task_id):
+        return ensure_bytes(self.task_keyprefix) + ensure_bytes(task_id)
+
+    def get_key_for_taskset(self, taskset_id):
+        return ensure_bytes(self.taskset_keyprefix) + ensure_bytes(taskset_id)
+
     def get(self, key):
         return self.client.get(key)
 
@@ -93,3 +104,12 @@ class CacheBackend(KeyValueStoreBackend):
     @cached_property
     def client(self):
         return self.Client(self.servers, **self.options)
+
+    def __reduce__(self, args=(), kwargs={}):
+        servers = ";".join(self.servers)
+        backend = "%s://%s/" % (self.backend, servers)
+        kwargs.update(
+            dict(backend=backend,
+                 expires=self.expires,
+                 options=self.options))
+        return super(CacheBackend, self).__reduce__(args, kwargs)
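
The new key builders run task ids through `ensure_bytes` because memcached client libraries require byte string keys. The helper's contract is roughly the following (the actual implementation lives in `celery.utils.encoding`):

    def ensure_bytes(s, encoding="utf-8"):
        # Python 2: encode unicode to a byte string, pass str through.
        if isinstance(s, unicode):
            return s.encode(encoding)
        return s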

+ 20 - 10
celery/backends/cassandra.py

@@ -1,4 +1,6 @@
 """celery.backends.cassandra"""
+from __future__ import absolute_import
+
 try:
     import pycassa
     from thrift import Thrift
@@ -11,11 +13,11 @@ import time
 
 from datetime import datetime
 
-from celery.backends.base import BaseDictBackend
-from celery.exceptions import ImproperlyConfigured
-from celery.utils.serialization import pickle
-from celery.utils.timeutils import maybe_timedelta, timedelta_seconds
-from celery import states
+from .. import states
+from ..exceptions import ImproperlyConfigured
+from ..utils.timeutils import maybe_timedelta, timedelta_seconds
+
+from .base import BaseDictBackend
 
 
 class CassandraBackend(BaseDictBackend):
@@ -100,7 +102,7 @@ class CassandraBackend(BaseDictBackend):
                     Thrift.TException), exc:
                 if time.time() > ts:
                     raise
-                self.logger.warn('Cassandra error: %r. Retrying...' % (exc, ))
+                self.logger.warn('Cassandra error: %r. Retrying...', exc)
                 time.sleep(self._retry_wait)
 
     def _get_column_family(self):
@@ -124,9 +126,9 @@ class CassandraBackend(BaseDictBackend):
             cf = self._get_column_family()
             date_done = datetime.utcnow()
             meta = {"status": status,
-                    "result": pickle.dumps(result),
+                    "result": self.encode(result),
                     "date_done": date_done.strftime('%Y-%m-%dT%H:%M:%SZ'),
-                    "traceback": pickle.dumps(traceback)}
+                    "traceback": self.encode(traceback)}
             cf.insert(task_id, meta,
                       ttl=timedelta_seconds(self.expires))
 
@@ -142,12 +144,20 @@ class CassandraBackend(BaseDictBackend):
                 meta = {
                     "task_id": task_id,
                     "status": obj["status"],
-                    "result": pickle.loads(str(obj["result"])),
+                    "result": self.decode(obj["result"]),
                     "date_done": obj["date_done"],
-                    "traceback": pickle.loads(str(obj["traceback"])),
+                    "traceback": self.decode(obj["traceback"]),
                 }
             except (KeyError, pycassa.NotFoundException):
                 meta = {"status": states.PENDING, "result": None}
             return meta
 
         return self._retry_on_error(_do_get)
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(servers=self.servers,
+                 keyspace=self.keyspace,
+                 column_family=self.column_family,
+                 cassandra_options=self.cassandra_options))
+        return super(CassandraBackend, self).__reduce__(args, kwargs)

+ 28 - 9
celery/backends/database.py

@@ -1,11 +1,14 @@
+from __future__ import absolute_import
+
 from datetime import datetime
 
-from celery import states
-from celery.backends.base import BaseDictBackend
-from celery.db.models import Task, TaskSet
-from celery.db.session import ResultSession
-from celery.exceptions import ImproperlyConfigured
-from celery.utils.timeutils import maybe_timedelta
+from .. import states
+from ..db.models import Task, TaskSet
+from ..db.session import ResultSession
+from ..exceptions import ImproperlyConfigured
+from ..utils.timeutils import maybe_timedelta
+
+from .base import BaseDictBackend
 
 
 def _sqlalchemy_installed():
@@ -21,21 +24,30 @@ _sqlalchemy_installed()
 
 class DatabaseBackend(BaseDictBackend):
     """The database result backend."""
+    # ResultSet.iterate should sleep this much between each poll,
+    # so as not to bombard the database with queries.
+    subpolling_interval = 0.5
 
     def __init__(self, dburi=None, expires=None,
             engine_options=None, **kwargs):
         super(DatabaseBackend, self).__init__(**kwargs)
+        conf = self.app.conf
         self.expires = maybe_timedelta(self.prepare_expires(expires))
-        self.dburi = dburi or self.app.conf.CELERY_RESULT_DBURI
+        self.dburi = dburi or conf.CELERY_RESULT_DBURI
         self.engine_options = dict(engine_options or {},
-                        **self.app.conf.CELERY_RESULT_ENGINE_OPTIONS or {})
+                        **conf.CELERY_RESULT_ENGINE_OPTIONS or {})
+        self.short_lived_sessions = kwargs.get("short_lived_sessions",
+                                    conf.CELERY_RESULT_DB_SHORT_LIVED_SESSIONS)
         if not self.dburi:
             raise ImproperlyConfigured(
                     "Missing connection string! Do you have "
                     "CELERY_RESULT_DBURI set to a real value?")
 
     def ResultSession(self):
-        return ResultSession(dburi=self.dburi, **self.engine_options)
+        return ResultSession(
+                    dburi=self.dburi,
+                    short_lived_sessions=self.short_lived_sessions,
+                    **self.engine_options)
 
     def _store_result(self, task_id, result, status, traceback=None):
         """Store return value and status of an executed task."""
@@ -122,3 +134,10 @@ class DatabaseBackend(BaseDictBackend):
             session.commit()
         finally:
             session.close()
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(dburi=self.dburi,
+                 expires=self.expires,
+                 engine_options=self.engine_options))
+        return super(DatabaseBackend, self).__reduce__(args, kwargs)

+ 16 - 9
celery/backends/mongodb.py

@@ -1,4 +1,6 @@
 """MongoDB backend for celery."""
+from __future__ import absolute_import
+
 from datetime import datetime
 
 try:
@@ -6,11 +8,11 @@ try:
 except ImportError:
     pymongo = None  # noqa
 
-from celery import states
-from celery.backends.base import BaseDictBackend
-from celery.exceptions import ImproperlyConfigured
-from celery.utils.serialization import pickle
-from celery.utils.timeutils import maybe_timedelta
+from .. import states
+from ..exceptions import ImproperlyConfigured
+from ..utils.timeutils import maybe_timedelta
+
+from .base import BaseDictBackend
 
 
 class Bunch:
@@ -98,9 +100,9 @@ class MongoBackend(BaseDictBackend):
 
         meta = {"_id": task_id,
                 "status": status,
-                "result": Binary(pickle.dumps(result)),
+                "result": Binary(self.encode(result)),
                 "date_done": datetime.now(),
-                "traceback": Binary(pickle.dumps(traceback))}
+                "traceback": Binary(self.encode(traceback))}
 
         db = self._get_database()
         taskmeta_collection = db[self.mongodb_taskmeta_collection]
@@ -120,9 +122,9 @@ class MongoBackend(BaseDictBackend):
         meta = {
             "task_id": obj["_id"],
             "status": obj["status"],
-            "result": pickle.loads(str(obj["result"])),
+            "result": self.decode(obj["result"]),
             "date_done": obj["date_done"],
-            "traceback": pickle.loads(str(obj["traceback"])),
+            "traceback": self.decode(obj["traceback"]),
         }
 
         return meta
@@ -136,3 +138,8 @@ class MongoBackend(BaseDictBackend):
                     "$lt": datetime.now() - self.expires,
                  }
         })
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(expires=self.expires))
+        return super(MongoBackend, self).__reduce__(args, kwargs)

+ 14 - 3
celery/backends/pyredis.py

@@ -6,7 +6,7 @@ Please use :class:`celery.backends.redis.RedisBackend` instead.
 """
 from __future__ import absolute_import
 
-from celery.backends import redis
+from . import redis
 
 
 class RedisBackend(redis.RedisBackend):
@@ -17,6 +17,17 @@ class RedisBackend(redis.RedisBackend):
         self.redis_port = redis_port
         self.redis_db = redis_db
         self.redis_password = redis_password
-        super(RedisBackend, self).__init__(host=redis_host,
+        # Changed in order to avoid duplicated arguments
+        super(RedisBackend, self).__init__(**dict(kwargs, host=redis_host,
                                            port=redis_port, db=redis_db,
-                                           password=redis_password, **kwargs)
+                                           password=redis_password))
+
+    def __reduce__(self, args=(), kwargs={}):
+        # Not very useful, but without the following, the redis_* attributes
+        # would not be set.
+        kwargs.update(
+            dict(redis_host=self.redis_host,
+                 redis_port=self.redis_port,
+                 redis_db=self.redis_db,
+                 redis_password=self.redis_password))
+        return super(RedisBackend, self).__reduce__(args, kwargs)

+ 16 - 7
celery/backends/redis.py

@@ -1,9 +1,9 @@
 from __future__ import absolute_import
 
-from kombu.utils import cached_property
+from ..exceptions import ImproperlyConfigured
+from ..utils import cached_property
 
-from celery.backends.base import KeyValueStoreBackend
-from celery.exceptions import ImproperlyConfigured
+from .base import KeyValueStoreBackend
 
 try:
     import redis
@@ -40,9 +40,9 @@ class RedisBackend(KeyValueStoreBackend):
                     "You need to install the redis library in order to use "
                   + "Redis result store backend.")
 
-        # For compatability with the old REDIS_* configuration keys.
+        # For compatibility with the old REDIS_* configuration keys.
         def _get(key):
-            for prefix in "REDIS_%s", "CELERY_REDIS_%s":
+            for prefix in "CELERY_REDIS_%s", "REDIS_%s":
                 try:
                     return conf[prefix % key]
                 except KeyError:
@@ -77,8 +77,8 @@ class RedisBackend(KeyValueStoreBackend):
 
     def on_chord_part_return(self, task, propagate=False,
             keyprefix="chord-unlock-%s"):
-        from celery.task.sets import subtask
-        from celery.result import TaskSetResult
+        from ..task.sets import subtask
+        from ..result import TaskSetResult
         setid = task.request.taskset
         key = keyprefix % setid
         deps = TaskSetResult.restore(setid, backend=task.backend)
@@ -91,3 +91,12 @@ class RedisBackend(KeyValueStoreBackend):
     def client(self):
         return self.redis.Redis(host=self.host, port=self.port,
                                 db=self.db, password=self.password)
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(host=self.host,
+                 port=self.port,
+                 db=self.db,
+                 password=self.password,
+                 expires=self.expires))
+        return super(RedisBackend, self).__reduce__(args, kwargs)

+ 11 - 2
celery/backends/tyrant.py

@@ -1,11 +1,14 @@
 """celery.backends.tyrant"""
+from __future__ import absolute_import
+
 try:
     import pytyrant
 except ImportError:
     pytyrant = None  # noqa
 
-from celery.backends.base import KeyValueStoreBackend
-from celery.exceptions import ImproperlyConfigured
+from ..exceptions import ImproperlyConfigured
+
+from .base import KeyValueStoreBackend
 
 
 class TyrantBackend(KeyValueStoreBackend):
@@ -82,3 +85,9 @@ class TyrantBackend(KeyValueStoreBackend):
 
     def delete(self, key):
         self.open().pop(key, None)
+
+    def __reduce__(self, args=(), kwargs={}):
+        kwargs.update(
+            dict(tyrant_host=self.tyrant_host,
+                 tyrant_port=self.tyrant_port))
+        return super(TyrantBackend, self).__reduce__(args, kwargs)

+ 40 - 30
celery/beat.py

@@ -1,3 +1,11 @@
+"""
+
+celery.beat
+===========
+
+The Celery periodic task scheduler.
+
+"""
 from __future__ import absolute_import
 
 import errno
@@ -14,17 +22,18 @@ except ImportError:
 
 from datetime import datetime
 
-from kombu.utils import cached_property
+from . import __version__
+from . import platforms
+from . import registry
+from . import signals
+from .app import app_or_default
+from .log import SilenceRepeated
+from .schedules import maybe_schedule, crontab
+from .utils import cached_property, instantiate, maybe_promise
+from .utils.timeutils import humanize_seconds
 
-from celery import __version__
-from celery import platforms
-from celery import registry
-from celery import signals
-from celery.app import app_or_default
-from celery.log import SilenceRepeated
-from celery.schedules import maybe_schedule, crontab
-from celery.utils import instantiate, maybe_promise
-from celery.utils.timeutils import humanize_seconds
+__all__ = ["SchedulingError", "ScheduleEntry", "Scheduler",
+           "Service", "EmbeddedService"]
 
 
 class SchedulingError(Exception):
@@ -81,13 +90,13 @@ class ScheduleEntry(object):
     def _default_now(self):
         return datetime.now()
 
-    def next(self, last_run_at=None):
+    def _next_instance(self, last_run_at=None):
         """Returns a new instance of the same class, but with
         its date and count fields updated."""
         return self.__class__(**dict(self,
                                      last_run_at=last_run_at or datetime.now(),
                                      total_run_count=self.total_run_count + 1))
-    __next__ = next  # for 2to3
+    __next__ = next = _next_instance  # for 2to3
 
     def update(self, other):
         """Update values from another entry.
@@ -162,15 +171,16 @@ class Scheduler(object):
         is_due, next_time_to_run = entry.is_due()
 
         if is_due:
-            self.logger.debug("Scheduler: Sending due task %s" % entry.task)
+            self.logger.debug("Scheduler: Sending due task %s", entry.task)
             try:
                 result = self.apply_async(entry, publisher=publisher)
             except Exception, exc:
-                self.logger.error("Message Error: %s\n%s" % (exc,
-                    traceback.format_stack()), exc_info=sys.exc_info())
+                self.logger.error("Message Error: %s\n%s", exc,
+                                  traceback.format_stack(),
+                                  exc_info=sys.exc_info())
             else:
-                self.logger.debug("%s sent. id->%s" % (entry.task,
-                                                       result.task_id))
+                self.logger.debug("%s sent. id->%s", entry.task,
+                                  result.task_id)
         return next_time_to_run
 
     def tick(self):
@@ -281,8 +291,8 @@ class Scheduler(object):
         # callback called for each retry while the connection
         # can't be established.
         def _error_handler(exc, interval):
-            self.logger.error("Celerybeat: Connection error: %s. " % exc
-                            + "Trying again in %s seconds..." % interval)
+            self.logger.error("Celerybeat: Connection error: %s. "
+                              "Trying again in %s seconds...", exc, interval)
 
         return self.connection.ensure_connection(_error_handler,
                     self.app.conf.BROKER_CONNECTION_MAX_RETRIES)
@@ -327,8 +337,8 @@ class PersistentScheduler(Scheduler):
                                                 writeback=True)
             entries = self._store.setdefault("entries", {})
         except Exception, exc:
-            self.logger.error("Removing corrupted schedule file %r: %r" % (
-                self.schedule_filename, exc))
+            self.logger.error("Removing corrupted schedule file %r: %r",
+                              self.schedule_filename, exc, exc_info=True)
             self._remove_db()
             self._store = self.persistence.open(self.schedule_filename,
                                                 writeback=True)
@@ -373,15 +383,15 @@ class Service(object):
         self.schedule_filename = schedule_filename or \
                                     app.conf.CELERYBEAT_SCHEDULE_FILENAME
 
-        self._shutdown = threading.Event()
-        self._stopped = threading.Event()
+        self._is_shutdown = threading.Event()
+        self._is_stopped = threading.Event()
         self.debug = SilenceRepeated(self.logger.debug,
                         10 if self.max_interval < 60 else 1)
 
     def start(self, embedded_process=False):
         self.logger.info("Celerybeat: Starting...")
-        self.logger.debug("Celerybeat: Ticking with max interval->%s" % (
-                    humanize_seconds(self.scheduler.max_interval)))
+        self.logger.debug("Celerybeat: Ticking with max interval->%s",
+                          humanize_seconds(self.scheduler.max_interval))
 
         signals.beat_init.send(sender=self)
         if embedded_process:
@@ -389,24 +399,24 @@ class Service(object):
             platforms.set_process_title("celerybeat")
 
         try:
-            while not self._shutdown.isSet():
+            while not self._is_shutdown.isSet():
                 interval = self.scheduler.tick()
                 self.debug("Celerybeat: Waking up %s." % (
                         humanize_seconds(interval, prefix="in ")))
                 time.sleep(interval)
         except (KeyboardInterrupt, SystemExit):
-            self._shutdown.set()
+            self._is_shutdown.set()
         finally:
             self.sync()
 
     def sync(self):
         self.scheduler.close()
-        self._stopped.set()
+        self._is_stopped.set()
 
     def stop(self, wait=False):
         self.logger.info("Celerybeat: Shutting down...")
-        self._shutdown.set()
-        wait and self._stopped.wait()  # block until shutdown done.
+        self._is_shutdown.set()
+        wait and self._is_stopped.wait()  # block until shutdown done.
 
     def get_scheduler(self, lazy=False):
         filename = self.schedule_filename
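
With the rename above, advancing a `ScheduleEntry` still returns a fresh instance with its bookkeeping fields bumped rather than mutating in place. Roughly (entry arguments hypothetical):

    from celery.beat import ScheduleEntry

    entry = ScheduleEntry(name="add-every-10s", task="tasks.add",
                          schedule=10.0)
    # next(entry) == entry.next() == entry._next_instance()
    entry = next(entry)
    # entry.total_run_count is now 1, last_run_at ~ datetime.now()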

+ 50 - 12
celery/bin/base.py

@@ -1,9 +1,18 @@
+from __future__ import absolute_import
+
 import os
 import sys
+import warnings
 
 from optparse import OptionParser, make_option as Option
 
-import celery
+from .. import __version__, Celery
+from ..exceptions import CDeprecationWarning, CPendingDeprecationWarning
+
+
+# always enable DeprecationWarnings, so our users can see them.
+for warning in (CDeprecationWarning, CPendingDeprecationWarning):
+    warnings.simplefilter("once", warning, 0)
 
 
 class Command(object):
@@ -13,11 +22,12 @@ class Command(object):
     :keyword get_app: Callable returning the current app if no app provided.
 
     """
+    _default_broker_url = r'amqp://guest:guest@localhost:5672//'
     #: Arg list used in help.
     args = ''
 
     #: Application version.
-    version = celery.__version__
+    version = __version__
 
     #: If false the parser will raise an exception if positional
     #: args are provided.
@@ -31,6 +41,10 @@ class Command(object):
             Option("--app",
                     default=None, action="store", dest="app",
                     help="Name of the app instance to use. "),
+            Option("-b", "--broker",
+                    default=None, action="store", dest="broker",
+                    help="Broker URL.  Default is %s" % (
+                            _default_broker_url, )),
             Option("--loader",
                    default=None, action="store", dest="loader",
                    help="Name of the loader class to use. "
@@ -117,15 +131,28 @@ class Command(object):
                            option_list=(self.preload_options +
                                         self.get_options()))
 
+    def prepare_preload_options(self, options):
+        """Optional handler to do additional processing of preload options.
+
+        Configuration must not have been initialized
+        until after this is called.
+
+        """
+        pass
+
     def setup_app_from_commandline(self, argv):
         preload_options = self.parse_preload_options(argv)
-        app = (preload_options.pop("app", None) or
+        self.prepare_preload_options(preload_options)
+        app = (preload_options.get("app") or
                os.environ.get("CELERY_APP") or
                self.app)
-        loader = (preload_options.pop("loader", None) or
+        loader = (preload_options.get("loader") or
                   os.environ.get("CELERY_LOADER") or
                   "default")
-        config_module = preload_options.pop("config_module", None)
+        broker = preload_options.get("broker", None)
+        if broker:
+            os.environ["CELERY_BROKER_URL"] = broker
+        config_module = preload_options.get("config_module")
         if config_module:
             os.environ["CELERY_CONFIG_MODULE"] = config_module
         if app:
@@ -137,7 +164,7 @@ class Command(object):
         return argv
 
     def get_cls_by_name(self, name):
-        from celery.utils import get_cls_by_name, import_from_cwd
+        from ..utils import get_cls_by_name, import_from_cwd
         return get_cls_by_name(name, imp=import_from_cwd)
 
     def process_cmdline_config(self, argv):
@@ -151,21 +178,32 @@ class Command(object):
 
     def parse_preload_options(self, args):
         acc = {}
-        preload_options = dict((opt._long_opts[0], opt.dest)
-                                for opt in self.preload_options)
-        for arg in args:
+        opts = {}
+        for opt in self.preload_options:
+            for t in (opt._long_opts, opt._short_opts):
+                opts.update(dict(zip(t, [opt.dest] * len(t))))
+        index = 0
+        length = len(args)
+        while index < length:
+            arg = args[index]
             if arg.startswith('--') and '=' in arg:
                 key, value = arg.split('=', 1)
-                dest = preload_options.get(key)
+                dest = opts.get(key)
                 if dest:
                     acc[dest] = value
+            elif arg.startswith('-'):
+                dest = opts.get(arg)
+                if dest:
+                    acc[dest] = args[index + 1]
+                    index += 1
+            index += 1
         return acc
 
     def _get_default_app(self, *args, **kwargs):
-        return celery.Celery(*args, **kwargs)
+        return Celery(*args, **kwargs)
 
 
-def daemon_options(default_pidfile, default_logfile=None):
+def daemon_options(default_pidfile=None, default_logfile=None):
     return (
         Option('-f', '--logfile', default=default_logfile,
                action="store", dest="logfile",

+ 6 - 3
celery/bin/camqadm.py

@@ -4,6 +4,8 @@
 .. program:: camqadm
 
 """
+from __future__ import absolute_import
+
 import cmd
 import sys
 import shlex
@@ -13,9 +15,10 @@ from itertools import count
 
 from amqplib import client_0_8 as amqp
 
-from celery.app import app_or_default
-from celery.bin.base import Command
-from celery.utils import padlist
+from ..app import app_or_default
+from ..utils import padlist
+
+from .base import Command
 
 # Valid string -> bool coercions.
 BOOLS = {"1": True, "0": False,

+ 15 - 7
celery/bin/celerybeat.py

@@ -25,21 +25,26 @@
 from __future__ import with_statement
 from __future__ import absolute_import
 
+import os
+
 from functools import partial
 
-from celery.platforms import detached
-from celery.bin.base import Command, Option, daemon_options
+from ..platforms import detached
+
+from .base import Command, Option, daemon_options
 
 
 class BeatCommand(Command):
     supports_args = False
+    preload_options = (Command.preload_options
+                     + daemon_options(default_pidfile="celerybeat.pid"))
 
     def run(self, detach=False, logfile=None, pidfile=None, uid=None,
             gid=None, umask=None, working_directory=None, **kwargs):
+        workdir = working_directory
         kwargs.pop("app", None)
         beat = partial(self.app.Beat,
                        logfile=logfile, pidfile=pidfile, **kwargs)
-        workdir = working_directory
 
         if detach:
             with detached(logfile, pidfile, uid, gid, umask, workdir):
@@ -47,6 +52,11 @@ class BeatCommand(Command):
         else:
             return beat().run()
 
+    def prepare_preload_options(self, options):
+        workdir = options.get("working_directory")
+        if workdir:
+            os.chdir(workdir)
+
     def get_options(self):
         conf = self.app.conf
 
@@ -61,7 +71,7 @@ class BeatCommand(Command):
                     "'.db' will be appended to the filename. Default: %s" % (
                             conf.CELERYBEAT_SCHEDULE_FILENAME, )),
             Option('--max-interval',
-                default=3600.0, type="float", dest="max_interval",
+                default=None, type="float", dest="max_interval",
                 help="Max. seconds to sleep between schedule iterations."),
             Option('-S', '--scheduler',
                 default=None,
@@ -71,9 +81,7 @@ class BeatCommand(Command):
             Option('-l', '--loglevel',
                 default=conf.CELERYBEAT_LOG_LEVEL,
                 action="store", dest="loglevel",
-                help="Loglevel. One of DEBUG/INFO/WARNING/ERROR/CRITICAL."),
-        ) + daemon_options(default_pidfile="celerybeat.pid",
-                           default_logfile=conf.CELERYBEAT_LOG_FILE)
+                help="Loglevel. One of DEBUG/INFO/WARNING/ERROR/CRITICAL."))
 
 
 def main():

+ 27 - 8
celery/bin/celeryctl.py

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
@@ -8,10 +9,11 @@ from textwrap import wrap
 
 from anyjson import deserialize
 
-from celery import __version__
-from celery.app import app_or_default, current_app
-from celery.bin.base import Command as CeleryCommand
-from celery.utils import term
+from .. import __version__
+from ..app import app_or_default, current_app
+from ..utils import term
+
+from .base import Command as CeleryCommand
 
 
 commands = {}
@@ -201,7 +203,7 @@ class result(Command):
     )
 
     def run(self, task_id, *args, **kwargs):
-        from celery import registry
+        from .. import registry
         result_cls = self.app.AsyncResult
         task = kwargs.get("task")
 
@@ -219,7 +221,8 @@ class inspect(Command):
                "reserved": 1.0,
                "stats": 1.0,
                "revoked": 1.0,
-               "registered_tasks": 1.0,
+               "registered_tasks": 1.0,  # alias to registered
+               "registered": 1.0,
                "enable_events": 1.0,
                "disable_events": 1.0,
                "ping": 0.2,
@@ -231,6 +234,7 @@ class inspect(Command):
                     help="Timeout in seconds (float) waiting for reply"),
                 Option("--destination", "-d", dest="destination",
                     help="Comma separated list of destination node names."))
+    show_body = True
 
     def usage(self, command):
         return "%%prog %s [options] %s [%s]" % (
@@ -238,6 +242,7 @@ class inspect(Command):
 
     def run(self, *args, **kwargs):
         self.quiet = kwargs.get("quiet", False)
+        self.show_body = kwargs.get("show_body", True)
         if not args:
             raise Error("Missing inspect command. See --help")
         command = args[0]
@@ -273,7 +278,7 @@ class inspect(Command):
             return
         dirstr = not self.quiet and c.bold(c.white(direction), " ") or ""
         self.out(c.reset(dirstr, title))
-        if body and not self.quiet:
+        if body and self.show_body:
             self.out(body)
 inspect = command(inspect)
 
@@ -289,7 +294,7 @@ class status(Command):
     def run(self, *args, **kwargs):
         replies = inspect(app=self.app,
                           no_color=kwargs.get("no_color", False)) \
-                    .run("ping", **dict(kwargs, quiet=True))
+                    .run("ping", **dict(kwargs, quiet=True, show_body=False))
         if not replies:
             raise Error("No nodes replied within time constraint")
         nodecount = len(replies)
@@ -331,8 +336,22 @@ class celeryctl(CeleryCommand):
         except Error:
             return self.execute("help", argv)
 
+    def remove_options_at_beginning(self, argv, index=0):
+        if argv:
+            while index < len(argv):
+                value = argv[index]
+                if value.startswith("--"):
+                    pass
+                elif value.startswith("-"):
+                    index += 1
+                else:
+                    return argv[index:]
+                index += 1
+        return []
+
     def handle_argv(self, prog_name, argv):
         self.prog_name = prog_name
+        argv = self.remove_options_at_beginning(argv)
         try:
             command = argv[0]
         except IndexError:

+ 6 - 3
celery/bin/celeryd.py

@@ -71,6 +71,8 @@
     terminated and replaced by a new worker.
 
 """
+from __future__ import absolute_import
+
 import sys
 
 try:
@@ -78,7 +80,7 @@ try:
 except ImportError:  # pragma: no cover
     freeze_support = lambda: True  # noqa
 
-from celery.bin.base import Command, Option
+from .base import Command, Option
 
 
 class WorkerCommand(Command):
@@ -90,7 +92,7 @@ class WorkerCommand(Command):
         kwargs.pop("app", None)
         # Pools like eventlet/gevent needs to patch libs as early
         # as possible.
-        from celery import concurrency
+        from .. import concurrency
         kwargs["pool"] = concurrency.get_implementation(
                     kwargs.get("pool") or self.app.conf.CELERYD_POOL)
         return self.app.Worker(**kwargs).run()
@@ -106,7 +108,8 @@ class WorkerCommand(Command):
                 default=conf.CELERYD_POOL,
                 action="store", dest="pool", type="str",
                 help="Pool implementation: "
-                     "processes (default), eventlet or gevent."),
+                     "processes (default), eventlet, gevent, "
+                     "solo or threads."),
             Option('--purge', '--discard', default=False,
                 action="store_true", dest="discard",
                 help="Discard all waiting tasks before the server is"

+ 7 - 7
celery/bin/celeryd_detach.py

@@ -6,9 +6,10 @@ import sys
 
 from optparse import OptionParser, BadOptionError
 
-from celery import __version__
-from celery.platforms import detached
-from celery.bin.base import daemon_options
+from .. import __version__
+from ..platforms import detached
+
+from .base import daemon_options
 
 OPTION_LIST = daemon_options(default_pidfile="celeryd.pid")
 
@@ -20,11 +21,10 @@ def detach(path, argv, logfile=None, pidfile=None, uid=None,
             os.execv(path, [path] + argv)
         except Exception:
             import logging
-            from celery.log import setup_logger
+            from ..log import setup_logger
             logger = setup_logger(logfile=logfile, loglevel=logging.ERROR)
-            logger.critical("Can't exec %r" % (
-                    " ".join([path] + argv), ),
-                    exc_info=sys.exc_info())
+            logger.critical("Can't exec %r", " ".join([path] + argv),
+                            exc_info=sys.exc_info())
 
 
 class PartialOptionParser(OptionParser):

+ 32 - 13
celery/bin/celeryd_multi.py

@@ -85,9 +85,10 @@ Examples
     celeryd -n xuzzy.myhost -c 3
 
 """
+from __future__ import absolute_import
+
 import errno
 import os
-import shlex
 import signal
 import socket
 import sys
@@ -96,8 +97,10 @@ from collections import defaultdict
 from subprocess import Popen
 from time import sleep
 
-from celery import __version__
-from celery.utils import term
+from .. import __version__
+from ..platforms import shellsplit
+from ..utils import term
+from ..utils.encoding import from_utf8
 
 SIGNAMES = set(sig for sig in dir(signal)
                         if sig.startswith("SIG") and "_" not in sig)
@@ -118,6 +121,7 @@ usage: %(prog_name)s start <node1 node2 nodeN|range> [celeryd options]
 
 additional options (must appear after command name):
 
+    * --nosplash:   Don't display program info.
     * --quiet:      Don't show as much output.
     * --verbose:    Show more output.
     * --no-color:   Don't display colors.
@@ -131,11 +135,13 @@ def main():
 class MultiTool(object):
     retcode = 0  # Final exit code.
 
-    def __init__(self, env=None):
+    def __init__(self, env=None, fh=None):
+        self.fh = fh or sys.stderr
         self.env = env
         self.commands = {"start": self.start,
                          "show": self.show,
                          "stop": self.stop,
+                         "stop_verify": self.stop_verify,
                          "restart": self.restart,
                          "kill": self.kill,
                          "names": self.names,
@@ -146,10 +152,13 @@ class MultiTool(object):
     def execute_from_commandline(self, argv, cmd="celeryd"):
         argv = list(argv)   # don't modify callers argv.
 
-        # Reserve the --quiet|-q/--verbose options.
+        # Reserve the --nosplash|--quiet|-q/--verbose options.
+        self.nosplash = False
         self.quiet = False
         self.verbose = False
         self.no_color = False
+        if "--nosplash" in argv:
+            self.nosplash = argv.pop(argv.index("--nosplash"))
         if "--quiet" in argv:
             self.quiet = argv.pop(argv.index("--quiet"))
         if "-q" in argv:
@@ -175,9 +184,12 @@ class MultiTool(object):
 
         return self.retcode
 
+    def say(self, msg):
+        self.fh.write("%s\n" % (msg, ))
+
     def names(self, argv, cmd):
         p = NamespacedOptionParser(argv)
-        print("\n".join(hostname
+        self.say("\n".join(hostname
                         for hostname, _, _ in multi_args(p, cmd)))
 
     def get(self, argv, cmd):
@@ -185,13 +197,13 @@ class MultiTool(object):
         p = NamespacedOptionParser(argv[1:])
         for name, worker, _ in multi_args(p, cmd):
             if name == wanted:
-                print(" ".join(worker))
+                self.say(" ".join(worker))
                 return
 
     def show(self, argv, cmd):
         p = NamespacedOptionParser(argv)
         self.note("> Starting nodes...")
-        print("\n".join(" ".join(worker)
+        self.say("\n".join(" ".join(worker)
                         for _, worker, _ in multi_args(p, cmd)))
 
     def start(self, argv, cmd):
@@ -277,7 +289,7 @@ class MultiTool(object):
             self.note("")
 
     def getpids(self, p, cmd, callback=None):
-        from celery import platforms
+        from .. import platforms
         pidfile_template = p.options.setdefault("--pidfile", "celeryd@%n.pid")
 
         nodes = []
@@ -331,11 +343,17 @@ class MultiTool(object):
         self._stop_nodes(p, cmd, retry=2, callback=on_node_shutdown)
         self.retcode = int(any(retvals))
 
+    def stop_verify(self, argv, cmd):
+        self.splash()
+        p = NamespacedOptionParser(argv)
+        self.with_detacher_default_options(p)
+        return self._stop_nodes(p, cmd, retry=2)
+
     def expand(self, argv, cmd=None):
         template = argv[0]
         p = NamespacedOptionParser(argv[1:])
         for _, _, expander in multi_args(p, cmd):
-            print(expander(template))
+            self.say(expander(template))
 
     def help(self, argv, cmd=None):
         say(__doc__)
@@ -345,12 +363,13 @@ class MultiTool(object):
         say(USAGE % {"prog_name": self.prog_name})
 
     def splash(self):
-        c = self.colored
-        self.note(c.cyan("celeryd-multi v%s" % __version__))
+        if not self.nosplash:
+            c = self.colored
+            self.note(c.cyan("celeryd-multi v%s" % __version__))
 
     def waitexec(self, argv, path=sys.executable):
         args = " ".join([path] + list(argv))
-        argstr = shlex.split(args.encode("utf-8"))
+        argstr = shellsplit(from_utf8(args))
         pipe = Popen(argstr, env=self.env)
         self.info("  %s" % " ".join(argstr))
         retcode = pipe.wait()

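The hunk above reserves the global flags (``--nosplash``, ``--quiet``, ``-q``, ``--no-color``) by popping them out of ``argv`` before any sub-command parser sees them. A minimal sketch of that pattern (names are illustrative, not celery's API):

.. code-block:: python

    import sys

    GLOBAL_FLAGS = ("--nosplash", "--quiet", "--no-color")

    def reserve_flags(argv):
        # Pop recognised global flags out of argv so the remaining
        # arguments can be handed to a sub-command untouched.
        found = {}
        for flag in GLOBAL_FLAGS:
            if flag in argv:
                argv.pop(argv.index(flag))
                found[flag] = True
        return found

    if __name__ == "__main__":
        argv = list(sys.argv[1:])   # don't modify the caller's argv
        print(reserve_flags(argv), argv)

The new ``fh`` argument and ``say`` helper serve a similar purpose for testing: output goes through an injectable file handle instead of bare ``print``, so tests can capture it.
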
+ 16 - 9
celery/bin/celeryev.py

@@ -1,17 +1,21 @@
 from __future__ import absolute_import
 from __future__ import with_statement
 
+import os
 import sys
 
 from functools import partial
 
-from celery import platforms
-from celery.platforms import detached
-from celery.bin.base import Command, Option, daemon_options
+from .. import platforms
+from ..platforms import detached
+
+from .base import Command, Option, daemon_options
 
 
 class EvCommand(Command):
     supports_args = False
+    preload_options = (Command.preload_options
+                     + daemon_options(default_pidfile="celeryev.pid"))
 
     def run(self, dump=False, camera=None, frequency=1.0, maxrate=None,
             loglevel="INFO", logfile=None, prog_name="celeryev",
@@ -30,20 +34,25 @@ class EvCommand(Command):
                                   detach=detach)
         return self.run_evtop()
 
+    def prepare_preload_options(self, options):
+        workdir = options.get("working_directory")
+        if workdir:
+            os.chdir(workdir)
+
     def run_evdump(self):
-        from celery.events.dumper import evdump
+        from ..events.dumper import evdump
         self.set_process_status("dump")
         return evdump(app=self.app)
 
     def run_evtop(self):
-        from celery.events.cursesmon import evtop
+        from ..events.cursesmon import evtop
         self.set_process_status("top")
         return evtop(app=self.app)
 
     def run_evcam(self, camera, logfile=None, pidfile=None, uid=None,
             gid=None, umask=None, working_directory=None,
             detach=False, **kwargs):
-        from celery.events.snapshot import evcam
+        from ..events.snapshot import evcam
         workdir = working_directory
         self.set_process_status("cam")
         kwargs["app"] = self.app
@@ -81,9 +90,7 @@ class EvCommand(Command):
                    help="Recording: Shutter rate limit (e.g. 10/m)"),
             Option('-l', '--loglevel',
                    action="store", dest="loglevel", default="INFO",
-                   help="Loglevel. Default is WARNING."),
-        ) + daemon_options(default_pidfile="celeryev.pid",
-                           default_logfile=None)
+                   help="Loglevel. Default is INFO."))
 
 
 def main():

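Folding ``daemon_options`` into ``preload_options`` means those flags are parsed before the app and its configuration are loaded, so ``prepare_preload_options`` can change the working directory early enough to affect config lookup. A rough sketch of the idea, independent of celery's actual ``Command`` class (flag name borrowed from ``daemon_options`` for illustration; note that celery's real code uses the ``PartialOptionParser`` seen earlier so unrecognised worker options pass through, whereas a plain ``OptionParser`` rejects them):

.. code-block:: python

    import os
    from optparse import OptionParser

    def handle_preload(argv):
        # Parse only the options that must take effect before anything
        # else is imported or configured (sketch).
        parser = OptionParser()
        parser.add_option("--workdir", dest="working_directory")
        options, rest = parser.parse_args(argv)
        if options.working_directory:
            # chdir *before* the config module is imported, so a
            # relative config file is found in the requested directory.
            os.chdir(options.working_directory)
        return options, rest

    opts, rest = handle_preload(["--workdir", "."])
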
+ 5 - 3
celery/concurrency/__init__.py

@@ -1,9 +1,11 @@
-from celery.utils import get_cls_by_name
+from __future__ import absolute_import
+
+from ..utils import get_cls_by_name
 
 ALIASES = {
     "processes": "celery.concurrency.processes.TaskPool",
-    "eventlet": "celery.concurrency.evlet.TaskPool",
-    "gevent": "celery.concurrency.evg.TaskPool",
+    "eventlet": "celery.concurrency.eventlet.TaskPool",
+    "gevent": "celery.concurrency.gevent.TaskPool",
     "threads": "celery.concurrency.threads.TaskPool",
     "solo": "celery.concurrency.solo.TaskPool",
 }

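Besides adding the future import, this hunk fixes the stale ``evlet``/``evg`` paths to match the renamed modules below. ``get_cls_by_name`` resolves such aliases to classes; a minimal sketch of that lookup (the real helper also accepts class objects and relative names):

.. code-block:: python

    import importlib

    def get_cls_by_name(name, aliases=None):
        # Resolve an alias like "solo" to its dotted path, then import
        # the module and pull the class attribute off it.
        name = (aliases or {}).get(name, name)
        module_name, _, cls_name = name.rpartition(".")
        return getattr(importlib.import_module(module_name), cls_name)

    # Stdlib demo of the same mechanism:
    OrderedDict = get_cls_by_name("odict", {"odict": "collections.OrderedDict"})
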
+ 21 - 9
celery/concurrency/base.py

@@ -1,3 +1,6 @@
+from __future__ import absolute_import
+
+import logging
 import os
 import sys
 import time
@@ -5,9 +8,10 @@ import traceback
 
 from functools import partial
 
-from celery import log
-from celery.datastructures import ExceptionInfo
-from celery.utils import timer2
+from .. import log
+from ..datastructures import ExceptionInfo
+from ..utils import timer2
+from ..utils.encoding import safe_repr
 
 
 def apply_target(target, args=(), kwargs={}, callback=None,
@@ -25,6 +29,7 @@ class BasePool(object):
     Timer = timer2.Timer
 
     signal_safe = True
+    rlimit_safe = True
     is_green = False
 
     _state = None
@@ -35,6 +40,7 @@ class BasePool(object):
         self.putlocks = putlocks
         self.logger = logger or log.get_default_logger()
         self.options = options
+        self.does_debug = self.logger.isEnabledFor(logging.DEBUG)
 
     def on_start(self):
         pass
@@ -80,8 +86,9 @@ class BasePool(object):
         on_ready = partial(self.on_ready, callback, errback)
         on_worker_error = partial(self.on_worker_error, errback)
 
-        self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)" % (
-            target, args, kwargs))
+        if self.does_debug:
+            self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)",
+                            target, safe_repr(args), safe_repr(kwargs))
 
         return self.on_apply(target, args, kwargs,
                              callback=on_ready,
@@ -104,16 +111,17 @@ class BasePool(object):
         else:
             self.safe_apply_callback(callback, ret_value)
 
-    def on_worker_error(self, errback, exc):
-        errback(ExceptionInfo((exc.__class__, exc, None)))
+    def on_worker_error(self, errback, exc_info):
+        errback(exc_info)
 
     def safe_apply_callback(self, fun, *args):
         if fun:
             try:
                 fun(*args)
             except BaseException:
-                self.logger.error("Pool callback raised exception: %s" % (
-                    traceback.format_exc(), ), exc_info=sys.exc_info())
+                self.logger.error("Pool callback raised exception: %s",
+                                  traceback.format_exc(),
+                                  exc_info=sys.exc_info())
 
     def _get_info(self):
         return {}
@@ -125,3 +133,7 @@ class BasePool(object):
     @property
     def active(self):
         return self._state == self.RUN
+
+    @property
+    def num_processes(self):
+        return self.limit

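Two small hot-path optimizations land here: the ``logging.DEBUG`` check is cached once as ``does_debug``, and the message arguments are passed lazily instead of pre-formatted with ``%``, so ``safe_repr`` of large task payloads only runs when debug logging is actually enabled. The same pattern in isolation (a sketch, not celery's class):

.. code-block:: python

    import logging

    logger = logging.getLogger("pool")
    does_debug = logger.isEnabledFor(logging.DEBUG)   # cache at start-up

    def apply(target, args, kwargs):
        if does_debug:
            # %-style arguments are only interpolated when the record
            # is emitted, so the repr of big payloads is skipped
            # entirely when debug logging is off.
            logger.debug("TaskPool: Apply %s (args:%r kwargs:%r)",
                         target, args, kwargs)
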
+ 6 - 3
celery/concurrency/evlet.py → celery/concurrency/eventlet.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import os
 import sys
 
@@ -9,9 +11,10 @@ if not os.environ.get("EVENTLET_NOPATCH"):
     eventlet.monkey_patch()
     eventlet.debug.hub_prevent_multiple_readers(False)
 
-from celery import signals
-from celery.concurrency import base
-from celery.utils import timer2
+from .. import signals
+from ..utils import timer2
+
+from . import base
 
 
 def apply_target(target, args=(), kwargs={}, callback=None,

+ 6 - 2
celery/concurrency/evg.py → celery/concurrency/gevent.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import os
 import sys
 
@@ -7,8 +9,9 @@ if not os.environ.get("GEVENT_NOPATCH"):
     from gevent import monkey
     monkey.patch_all()
 
-from celery.concurrency.base import apply_target, BasePool
-from celery.utils import timer2
+from ..utils import timer2
+
+from .base import apply_target, BasePool
 
 
 class Schedule(timer2.Schedule):
@@ -84,6 +87,7 @@ class TaskPool(BasePool):
     Timer = Timer
 
     signal_safe = False
+    rlimit_safe = False
     is_green = True
 
     def __init__(self, *args, **kwargs):

+ 11 - 6
celery/concurrency/processes/__init__.py

@@ -3,20 +3,21 @@
 Process Pools.
 
 """
+from __future__ import absolute_import
+
 import platform
 import signal as _signal
 
-from os import kill as _kill
-
-from celery.concurrency.base import BasePool
-from celery.concurrency.processes.pool import Pool, RUN
+from ..base import BasePool
+from .pool import Pool, RUN
 
 if platform.system() == "Windows":  # pragma: no cover
     # On Windows os.kill calls TerminateProcess which cannot be
     # handled by any process, so this is needed to terminate the task
     # *and its children* (if any).
-    from celery.concurrency.processes import _win
-    _kill = _win.kill_processtree  # noqa
+    from ._win import kill_processtree as _kill  # noqa
+else:
+    from os import kill as _kill                 # noqa
 
 
 class TaskPool(BasePool):
@@ -74,3 +75,7 @@ class TaskPool(BasePool):
                 "max-tasks-per-child": self._pool._maxtasksperchild,
                 "put-guarded-by-semaphore": self.putlocks,
                 "timeouts": (self._pool.soft_timeout, self._pool.timeout)}
+
+    @property
+    def num_processes(self):
+        return self._pool._processes

+ 2 - 0
celery/concurrency/processes/_win.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import os
 
 __all__ = ["get_processtree_pids", "kill_processtree"]

+ 54 - 16
celery/concurrency/processes/pool.py

@@ -5,6 +5,7 @@
 #
 # Copyright (c) 2007-2008, R Oudkerk --- see COPYING.txt
 #
+from __future__ import absolute_import
 
 __all__ = ['Pool']
 
@@ -22,11 +23,13 @@ import collections
 import time
 import signal
 import warnings
+import logging
 
 from multiprocessing import Process, cpu_count, TimeoutError
 from multiprocessing import util
 from multiprocessing.util import Finalize, debug
 
+from celery.datastructures import ExceptionInfo
 from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
 from celery.exceptions import WorkerLostError
 
@@ -74,16 +77,30 @@ class LaxBoundedSemaphore(threading._Semaphore):
         _Semaphore.__init__(self, value, verbose)
         self._initial_value = value
 
-    def release(self):
-        if self._Semaphore__value < self._initial_value:
-            _Semaphore.release(self)
-        if __debug__:
-            self._note("%s.release: success, value=%s (unchanged)" % (
-                self, self._Semaphore__value))
+    if sys.version_info >= (3, 0):
 
-    def clear(self):
-        while self._Semaphore__value < self._initial_value:
-            _Semaphore.release(self)
+        def release(self):
+            if self._value < self._initial_value:
+                _Semaphore.release(self)
+            if __debug__:
+                self._note("%s.release: success, value=%s (unchanged)" % (
+                    self, self._value))
+
+        def clear(self):
+            while self._value < self._initial_value:
+                _Semaphore.release(self)
+    else:
+
+        def release(self):  # noqa
+            if self._Semaphore__value < self._initial_value:
+                _Semaphore.release(self)
+            if __debug__:
+                self._note("%s.release: success, value=%s (unchanged)" % (
+                    self, self._Semaphore__value))
+
+        def clear(self):  # noqa
+            while self._Semaphore__value < self._initial_value:
+                _Semaphore.release(self)
 
 #
 # Exceptions
@@ -119,6 +136,17 @@ def soft_timeout_sighandler(signum, frame):
 
 
 def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None):
+    # Re-init logging system.
+    # Workaround for http://bugs.python.org/issue6721#msg140215
+    # Python logging module uses RLock() objects which are broken after
+    # fork. This can result in a deadlock (Issue #496).
+    logger_names = logging.Logger.manager.loggerDict.keys()
+    logger_names.append(None)  # for root logger
+    for name in logger_names:
+        for handler in logging.getLogger(name).handlers:
+            handler.createLock()
+    logging._lock = threading.RLock()
+
     pid = os.getpid()
     assert maxtasks is None or (type(maxtasks) == int and maxtasks > 0)
     put = outqueue.put
@@ -166,8 +194,8 @@ def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None):
         put((ACK, (job, i, time.time(), pid)))
         try:
             result = (True, func(*args, **kwds))
-        except Exception, e:
-            result = (False, e)
+        except Exception:
+            result = (False, ExceptionInfo(sys.exc_info()))
         try:
             put((READY, (job, i, result)))
         except Exception, exc:
@@ -320,7 +348,12 @@ class TimeoutHandler(PoolThread):
                 return
             debug('hard time limit exceeded for %i', i)
             # Remove from cache and set return value to an exception
-            job._set(i, (False, TimeLimitExceeded(hard_timeout)))
+            exc_info = None
+            try:
+                raise TimeLimitExceeded(hard_timeout)
+            except TimeLimitExceeded:
+                exc_info = sys.exc_info()
+            job._set(i, (False, ExceptionInfo(exc_info)))
 
             # Remove from _pool
             process, _index = _process_by_pid(job._worker_pid)
@@ -572,8 +605,12 @@ class Pool(object):
                 if not job.ready() and job._worker_lost]:
             now = now or time.time()
             if now - job._worker_lost > job._lost_worker_timeout:
-                err = WorkerLostError("Worker exited prematurely.")
-                job._set(None, (False, err))
+                exc_info = None
+                try:
+                    raise WorkerLostError("Worker exited prematurely.")
+                except WorkerLostError:
+                    exc_info = ExceptionInfo(sys.exc_info())
+                job._set(None, (False, exc_info))
 
         if shutdown and not len(self._pool):
             raise WorkersJoined()
@@ -592,10 +629,11 @@ class Pool(object):
             for job in self._cache.values():
                 for worker_pid in job.worker_pids():
                     if worker_pid in cleaned and not job.ready():
-                        if self._putlock is not None:
-                            self._putlock.release()
                         job._worker_lost = time.time()
                         continue
+            if self._putlock is not None:
+                for worker in cleaned:
+                    self._putlock.release()
             return True
         return False
 

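The ``worker()`` change above is the headline fix: after ``fork()`` a logging handler's ``RLock`` may still be "held" by a thread that only existed in the parent, deadlocking the child (Issue #496, http://bugs.python.org/issue6721). Recreating every handler lock makes logging usable again. The workaround as a standalone sketch:

.. code-block:: python

    import logging
    import threading

    def reset_logging_locks():
        # Recreate the lock on every handler, plus the module-level
        # lock, so none of them is left locked by a parent-only thread.
        names = list(logging.Logger.manager.loggerDict) + [None]  # None = root
        for name in names:
            for handler in logging.getLogger(name).handlers:
                handler.createLock()
        logging._lock = threading.RLock()

The raise-and-catch dance around ``TimeLimitExceeded`` and ``WorkerLostError`` serves a related purpose: an exception constructed directly has no traceback, so raising it first attaches one that ``ExceptionInfo`` can capture via ``sys.exc_info()``.
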
+ 3 - 1
celery/concurrency/solo.py

@@ -1,6 +1,8 @@
+from __future__ import absolute_import
+
 import os
 
-from celery.concurrency.base import BasePool, apply_target
+from .base import BasePool, apply_target
 
 
 class TaskPool(BasePool):

+ 14 - 1
celery/concurrency/threads.py

@@ -1,4 +1,14 @@
-from celery.concurrency.base import apply_target, BasePool
+from __future__ import absolute_import
+
+from UserDict import UserDict
+
+from .base import apply_target, BasePool
+
+
+class NullDict(UserDict):
+
+    def __setitem__(self, key, value):
+        pass
 
 
 class TaskPool(BasePool):
@@ -15,6 +25,9 @@ class TaskPool(BasePool):
 
     def on_start(self):
         self._pool = self.ThreadPool(self.limit)
+        # threadpool stores all work requests until they are processed
+        # we don't need this dict, and it occupies way too much memory.
+        self._pool.workRequests = NullDict()
 
     def on_stop(self):
         self._pool.dismissWorkers(self.limit, do_join=True)

+ 4 - 2
celery/conf.py

@@ -6,8 +6,10 @@ Use :mod:`celery.defaults` instead.
 
 
 """
-from celery import current_app
-from celery.app import defaults
+from __future__ import absolute_import
+
+from . import current_app
+from .app import defaults
 
 _DEFAULTS = defaults.DEFAULTS
 conf = current_app.conf

+ 2 - 0
celery/contrib/abortable.py

@@ -78,6 +78,8 @@ have it block until the task is finished.
    database backends.
 
 """
+from __future__ import absolute_import
+
 from celery.task.base import Task
 from celery.result import AsyncResult
 

+ 30 - 7
celery/contrib/batches.py

@@ -39,24 +39,47 @@ Registering the click is done as follows:
 :license: BSD, see LICENSE for more details.
 
 """
-from itertools import count
-from Queue import Queue
+from __future__ import absolute_import
 
-from kombu.utils import cached_property
+from itertools import count
+from Queue import Empty, Queue
 
-from celery.datastructures import consume_queue
 from celery.task import Task
-from celery.utils import timer2
+from celery.utils import cached_property, timer2
 from celery.worker import state
 
 
+def consume_queue(queue):
+    """Iterator yielding all immediately available items in a
+    :class:`Queue.Queue`.
+
+    The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
+
+    *Examples*
+
+        >>> q = Queue()
+        >>> map(q.put, range(4))
+        >>> list(consume_queue(q))
+        [0, 1, 2, 3]
+        >>> list(consume_queue(q))
+        []
+
+    """
+    get = queue.get_nowait
+    while 1:
+        try:
+            yield get()
+        except Empty:
+            break
+
+
 def apply_batches_task(task, args, loglevel, logfile):
     task.request.update({"loglevel": loglevel, "logfile": logfile})
     try:
         result = task(*args)
     except Exception, exp:
         result = None
-        task.logger.error("There was an Exception: %s" % exp)
+        task.logger.error("There was an Exception: %s", exp, exc_info=True)
     finally:
         task.request.clear()
     return result
@@ -167,7 +190,7 @@ class Batches(Task):
                     callback=acks_late[True] and on_return or None)
 
     def debug(self, msg):
-        self.logger.debug("%s: %s" % (self.name, msg))
+        self.logger.debug("%s: %s", self.name, msg)
 
     @cached_property
     def logger(self):

+ 1 - 0
celery/contrib/rdb.py

@@ -36,6 +36,7 @@ Inspired by http://snippets.dzone.com/posts/show/7248
 :license: BSD, see LICENSE for more details.
 
 """
+from __future__ import absolute_import
 
 import errno
 import os

+ 112 - 53
celery/datastructures.py

@@ -2,23 +2,24 @@
 celery.datastructures
 =====================
 
-Custom data structures.
-
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
+Custom types and data structures.
 
 """
 from __future__ import absolute_import
 from __future__ import with_statement
 
+import sys
 import time
 import traceback
 
 from itertools import chain
-from Queue import Empty
 from threading import RLock
 
-from celery.utils.compat import OrderedDict
+from .utils.compat import UserDict, OrderedDict
+
+__all__ = ["AttributeDictMixin", "AttributeDict", "DictAttribute",
+           "ConfigurationView", "ExceptionInfo", "LimitedSet",
+           "LRUCache", "TokenBucket"]
 
 
 class AttributeDictMixin(object):
@@ -81,8 +82,16 @@ class DictAttribute(object):
     def __contains__(self, key):
         return hasattr(self.obj, key)
 
-    def iteritems(self):
+    def _iterate_items(self):
         return vars(self.obj).iteritems()
+    iteritems = _iterate_items
+
+    if sys.version_info >= (3, 0):
+        items = _iterate_items
+    else:
+
+        def items(self):
+            return list(self._iterate_items())
 
 
 class ConfigurationView(AttributeDictMixin):
@@ -147,23 +156,53 @@ class ConfigurationView(AttributeDictMixin):
         # changes takes precedence.
         return chain(*[op(d) for d in reversed(self._order)])
 
-    def iterkeys(self):
+    def _iterate_keys(self):
         return self._iter(lambda d: d.iterkeys())
+    iterkeys = _iterate_keys
 
-    def iteritems(self):
+    def _iterate_items(self):
         return self._iter(lambda d: d.iteritems())
+    iteritems = _iterate_items
 
-    def itervalues(self):
+    def _iterate_values(self):
         return self._iter(lambda d: d.itervalues())
+    itervalues = _iterate_values
 
     def keys(self):
-        return list(self.iterkeys())
+        return list(self._iterate_keys())
 
     def items(self):
-        return list(self.iteritems())
+        return list(self._iterate_items())
 
     def values(self):
-        return list(self.itervalues())
+        return list(self._iterate_values())
+
+
+class _Code(object):
+
+    def __init__(self, code):
+        self.co_filename = code.co_filename
+        self.co_name = code.co_name
+
+
+class _Frame(object):
+    Code = _Code
+
+    def __init__(self, frame):
+        self.f_globals = {"__file__": frame.f_globals["__file__"]}
+        self.f_code = self.Code(frame.f_code)
+
+
+class Traceback(object):
+    Frame = _Frame
+
+    def __init__(self, tb):
+        self.tb_frame = self.Frame(tb.tb_frame)
+        self.tb_lineno = tb.tb_lineno
+        if tb.tb_next is None:
+            self.tb_next = None
+        else:
+            self.tb_next = Traceback(tb.tb_next)
 
 
 class ExceptionInfo(object):
@@ -174,15 +213,21 @@ class ExceptionInfo(object):
 
     """
 
-    #: The original exception.
+    #: Exception type.
+    type = None
+
+    #: Exception instance.
     exception = None
 
-    #: A traceback form the point when :attr:`exception` was raised.
+    #: Pickleable traceback instance for use with :mod:`traceback`
+    tb = None
+
+    #: String representation of the traceback.
     traceback = None
 
     def __init__(self, exc_info):
-        _, exception, _ = exc_info
-        self.exception = exception
+        self.type, self.exception, tb = exc_info
+        self.tb = Traceback(tb)
         self.traceback = ''.join(traceback.format_exception(*exc_info))
 
     def __str__(self):
@@ -191,29 +236,9 @@ class ExceptionInfo(object):
     def __repr__(self):
         return "<ExceptionInfo: %r>" % (self.exception, )
 
-
-def consume_queue(queue):
-    """Iterator yielding all immediately available items in a
-    :class:`Queue.Queue`.
-
-    The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
-
-    *Examples*
-
-        >>> q = Queue()
-        >>> map(q.put, range(4))
-        >>> list(consume_queue(q))
-        [0, 1, 2, 3]
-        >>> list(consume_queue(q))
-        []
-
-    """
-    get = queue.get_nowait
-    while 1:
-        try:
-            yield get()
-        except Empty:
-            break
+    @property
+    def exc_info(self):
+        return self.type, self.exception, self.tb
 
 
 class LimitedSet(object):
@@ -291,27 +316,61 @@ class LimitedSet(object):
         return self.chronologically[0]
 
 
-class LocalCache(OrderedDict):
-    """Dictionary with a finite number of keys.
+class LRUCache(UserDict):
+    """LRU Cache implementation using a doubly linked list to track access.
 
-    Older items expires first.
+    :keyword limit: The maximum number of keys to keep in the cache.
+        When a new key is inserted and the limit has been exceeded,
+        the *Least Recently Used* key will be discarded from the
+        cache.
 
     """
 
     def __init__(self, limit=None):
-        super(LocalCache, self).__init__()
         self.limit = limit
-        self.lock = RLock()
+        self.mutex = RLock()
+        self.data = OrderedDict()
+
+    def __getitem__(self, key):
+        with self.mutex:
+            value = self[key] = self.data.pop(key)
+            return value
+
+    def keys(self):
+        # userdict.keys in py3k calls __getitem__
+        return self.data.keys()
+
+    def values(self):
+        return list(self._iterate_values())
+
+    def items(self):
+        return list(self._iterate_items())
 
     def __setitem__(self, key, value):
-        with self.lock:
-            while len(self) >= self.limit:
-                self.popitem(last=False)
-            super(LocalCache, self).__setitem__(key, value)
-
-    def pop(self, key, *args):
-        with self.lock:
-            super(LocalCache, self).pop(key, *args)
+        # remove least recently used key.
+        with self.mutex:
+            if self.limit and len(self.data) >= self.limit:
+                self.data.pop(iter(self.data).next())
+            self.data[key] = value
+
+    def __iter__(self):
+        return self.data.iterkeys()
+
+    def _iterate_items(self):
+        for k in self.data:
+            try:
+                yield (k, self.data[k])
+            except KeyError:
+                pass
+    iteritems = _iterate_items
+
+    def _iterate_values(self):
+        for k in self.data:
+            try:
+                yield self.data[k]
+            except KeyError:
+                pass
+    itervalues = _iterate_values
 
 
 class TokenBucket(object):

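``LRUCache`` replaces ``LocalCache``: instead of always expiring the oldest *inserted* key, it re-inserts a key on every read, so the first key in the ``OrderedDict`` is always the least recently *used*. The two moves that make this work, in miniature (a sketch of the policy, not the class above):

.. code-block:: python

    from collections import OrderedDict

    data, limit = OrderedDict(), 2

    def put(key, value):
        # On overflow, drop the first key -- least recently used,
        # because get() below re-inserts keys at the end on access.
        if len(data) >= limit and key not in data:
            data.pop(next(iter(data)))
        data[key] = value

    def get(key):
        value = data.pop(key)
        put(key, value)        # move to the end: most recently used
        return value

    put("a", 1); put("b", 2)
    get("a")                   # touch "a"
    put("c", 3)                # evicts "b", not "a"
    assert list(data) == ["a", "c"]
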
+ 2 - 0
celery/db/a805d4bd.py

@@ -19,6 +19,8 @@ Hence the random module name "a805d4bd" is taken to decrease the chances of
 a collision.
 
 """
+from __future__ import absolute_import
+
 from sqlalchemy.types import PickleType as _PickleType
 
 

+ 2 - 0
celery/db/dfd042c7.py

@@ -5,6 +5,8 @@ SQLAlchemy 0.5.8 version of a805d4bd, see the docstring of that module
 for an explanation of this workaround.
 
 """
+from __future__ import absolute_import
+
 from sqlalchemy.types import PickleType as _PickleType
 from sqlalchemy import util
 

+ 8 - 4
celery/db/models.py

@@ -1,14 +1,18 @@
+from __future__ import absolute_import
+
 from datetime import datetime
 
 import sqlalchemy as sa
 
-from celery import states
-from celery.db.session import ResultModelBase
+from .. import states
+
+from .session import ResultModelBase
+
 # See docstring of a805d4bd for an explanation for this workaround ;)
 if sa.__version__.startswith('0.5'):
-    from celery.db.dfd042c7 import PickleType
+    from .dfd042c7 import PickleType
 else:
-    from celery.db.a805d4bd import PickleType  # noqa
+    from .a805d4bd import PickleType  # noqa
 
 
 class Task(ResultModelBase):

+ 7 - 5
celery/db/session.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 from collections import defaultdict
 
 from sqlalchemy import create_engine
@@ -8,7 +10,7 @@ ResultModelBase = declarative_base()
 
 _SETUP = defaultdict(lambda: False)
 _ENGINES = {}
-_MAKERS = {}
+_SESSIONS = {}
 
 
 def get_engine(dburi, **kwargs):
@@ -17,11 +19,11 @@ def get_engine(dburi, **kwargs):
     return _ENGINES[dburi]
 
 
-def create_session(dburi, **kwargs):
+def create_session(dburi, short_lived_sessions=False, **kwargs):
     engine = get_engine(dburi, **kwargs)
-    if dburi not in _MAKERS:
-        _MAKERS[dburi] = sessionmaker(bind=engine)
-    return engine, _MAKERS[dburi]
+    if short_lived_sessions or dburi not in _SESSIONS:
+        _SESSIONS[dburi] = sessionmaker(bind=engine)
+    return engine, _SESSIONS[dburi]
 
 
 def setup_results(engine):

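``_MAKERS`` becomes ``_SESSIONS`` and grows a ``short_lived_sessions`` switch: the per-URI ``sessionmaker`` is normally memoized, but callers can force a fresh factory on demand (presumably to sidestep stale connections in long-running workers; the diff does not state the motivation). The caching shape in isolation (requires SQLAlchemy):

.. code-block:: python

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    _SESSIONS = {}

    def create_session(dburi, short_lived_sessions=False, **kwargs):
        # Cache one session factory per database URI;
        # short_lived_sessions forces a fresh factory instead.
        if short_lived_sessions or dburi not in _SESSIONS:
            _SESSIONS[dburi] = sessionmaker(bind=create_engine(dburi, **kwargs))
        return _SESSIONS[dburi]

    Session = create_session("sqlite://")
    session = Session()
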
+ 7 - 2
celery/decorators.py

@@ -12,12 +12,17 @@ The new decorators do not support magic keyword arguments.
 :license: BSD, see LICENSE for more details.
 
 """
+from __future__ import absolute_import
+
 import warnings
 
-from celery import task as _task
+from . import task as _task
+from .exceptions import CDeprecationWarning
+
+__all__ = ["task", "periodic_task"]
 
 
-warnings.warn(PendingDeprecationWarning("""
+warnings.warn(CDeprecationWarning("""
 The `celery.decorators` module and the magic keyword arguments
 are pending deprecation and will be deprecated in 2.4, then removed
 in 3.0.

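Swapping ``PendingDeprecationWarning`` for the project-specific subclasses added below in ``celery/exceptions.py`` lets users target celery's deprecations with the standard warning filters without affecting other libraries, e.g.:

.. code-block:: python

    import warnings

    class CDeprecationWarning(DeprecationWarning):
        pass

    # Escalate celery's own deprecations to errors during a test run,
    # while every other library's DeprecationWarnings stay warnings:
    warnings.simplefilter("error", CDeprecationWarning)
    try:
        warnings.warn("old API", CDeprecationWarning)
    except CDeprecationWarning as exc:
        print("escalated to an error: %s" % exc)
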
+ 16 - 4
celery/events/__init__.py

@@ -1,3 +1,13 @@
+"""
+
+celery.events
+=============
+
+Events are messages sent for actions happening
+in the worker (and clients if :setting:`CELERY_SEND_TASK_SENT_EVENT` is
+enabled).  These events can be used for monitoring.
+
+"""
 from __future__ import absolute_import
 from __future__ import with_statement
 
@@ -12,8 +22,10 @@ from itertools import count
 from kombu.entity import Exchange, Queue
 from kombu.messaging import Consumer, Producer
 
-from celery.app import app_or_default
-from celery.utils import gen_unique_id
+from ..app import app_or_default
+from ..utils import uuid
+
+__all__ = ["event_exchange", "Event", "EventDispatcher", "EventReceiver"]
 
 event_exchange = Exchange("celeryev", type="topic")
 
@@ -145,7 +157,7 @@ class EventReceiver(object):
         if handlers is not None:
             self.handlers = handlers
         self.routing_key = routing_key
-        self.node_id = node_id or gen_unique_id()
+        self.node_id = node_id or uuid()
         self.queue = Queue("%s.%s" % ("celeryev", self.node_id),
                            exchange=event_exchange,
                            routing_key=self.routing_key,
@@ -240,7 +252,7 @@ class Events(object):
                                app=self.app)
 
     def State(self):
-        from celery.events.state import State as _State
+        from .state import State as _State
         return _State()
 
     @contextmanager

+ 16 - 5
celery/events/cursesmon.py

@@ -1,4 +1,12 @@
-import celery
+"""
+celery.events.cursesmon
+=======================
+
+celeryev is a monitor written in curses using celery events.
+
+"""
+from __future__ import absolute_import
+
 import curses
 import sys
 import threading
@@ -9,9 +17,12 @@ from itertools import count
 from textwrap import wrap
 from math import ceil
 
-from celery import states
-from celery.app import app_or_default
-from celery.utils import abbr, abbrtask
+from .. import __version__
+from .. import states
+from ..app import app_or_default
+from ..utils import abbr, abbrtask
+
+__all__ = ["CursesMonitor", "evtop"]
 
 BORDER_SPACING = 4
 LEFT_BORDER_OFFSET = 3
@@ -35,7 +46,7 @@ class CursesMonitor(object):
     online_str = "Workers online: "
     help_title = "Keys: "
     help = ("j:up k:down i:info t:traceback r:result c:revoke ^c: quit")
-    greet = "celeryev %s" % celery.__version__
+    greet = "celeryev %s" % __version__
     info_str = "Info: "
 
     def __init__(self, state, keymap=None, app=None):

+ 17 - 4
celery/events/dumper.py

@@ -1,12 +1,25 @@
+"""
+
+celery.events.dumper
+====================
+
+This is a simple program used to show events as they are happening.
+Like tcpdump just for Celery events.
+
+"""
+from __future__ import absolute_import
+
 import sys
 
 from datetime import datetime
 
-from celery.app import app_or_default
-from celery.datastructures import LocalCache
+from ..app import app_or_default
+from ..datastructures import LRUCache
+
+__all__ = ["Dumper", "evdump"]
 
 
-TASK_NAMES = LocalCache(0xFFF)
+TASK_NAMES = LRUCache(limit=0xFFF)
 
 HUMAN_TYPES = {"worker-offline": "shutdown",
                "worker-online": "started",
@@ -28,7 +41,7 @@ class Dumper(object):
         hostname = event.pop("hostname")
         if type.startswith("task-"):
             uuid = event.pop("uuid")
-            if type.startswith("task-received"):
+            if type in ("task-received", "task-sent"):
                 task = TASK_NAMES[uuid] = "%s(%s) args=%s kwargs=%s" % (
                         event.pop("name"), uuid,
                         event.pop("args"),

+ 23 - 8
celery/events/snapshot.py

@@ -1,12 +1,27 @@
+"""
+
+celery.events.snapshot
+======================
+
+Consuming the events as a stream is not always suitable,
+so this module implements a system to take snapshots of the
+state of a cluster.  A full implementation of this, which
+writes the snapshots to a database, ships in ``django-celery``.
+
+"""
+
+from __future__ import absolute_import
+
 import atexit
 
-from celery import platforms
-from celery.app import app_or_default
-from celery.datastructures import TokenBucket
-from celery.utils import timer2
-from celery.utils import instantiate, LOG_LEVELS
-from celery.utils.dispatch import Signal
-from celery.utils.timeutils import rate
+from .. import platforms
+from ..app import app_or_default
+from ..datastructures import TokenBucket
+from ..utils import timer2, instantiate, LOG_LEVELS
+from ..utils.dispatch import Signal
+from ..utils.timeutils import rate
+
+__all__ = ["Polaroid", "evcam"]
 
 
 class Polaroid(object):
@@ -48,7 +63,7 @@ class Polaroid(object):
 
     def shutter(self):
         if self.maxrate is None or self.maxrate.can_consume():
-            self.logger.debug("Shutter: %s" % (self.state, ))
+            self.logger.debug("Shutter: %s", self.state)
             self.shutter_signal.send(self.state)
             self.on_shutter(self.state)
 

+ 40 - 12
celery/events/state.py

@@ -1,3 +1,22 @@
+"""
+
+celery.events.state
+===================
+
+This module implements a way to keep track of the
+state of a cluster of workers and the tasks it is working on
+by consuming events.
+
+For every event consumed the state is updated, so
+it represents the state of the cluster at the time
+of the last event.
+
+Snapshots (:mod:`celery.events.snapshot`) can be used
+to take pictures of this state at regular intervals
+and e.g. store it inside a database.
+
+"""
+
 from __future__ import absolute_import
 from __future__ import with_statement
 
@@ -6,9 +25,11 @@ import heapq
 
 from threading import Lock
 
-from celery import states
-from celery.datastructures import AttributeDict, LocalCache
-from celery.utils import kwdict
+from .. import states
+from ..datastructures import AttributeDict, LRUCache
+from ..utils import kwdict
+
+__all__ = ["HEARTBEAT_EXPIRE", "Worker", "Task", "State", "state"]
 
 #: Heartbeat expiry time in seconds.  The worker will be considered offline
 #: if no heartbeat is received within this time.
@@ -173,8 +194,8 @@ class State(object):
 
     def __init__(self, callback=None,
             max_workers_in_memory=5000, max_tasks_in_memory=10000):
-        self.workers = LocalCache(max_workers_in_memory)
-        self.tasks = LocalCache(max_tasks_in_memory)
+        self.workers = LRUCache(limit=max_workers_in_memory)
+        self.tasks = LRUCache(limit=max_tasks_in_memory)
         self.event_callback = callback
         self.group_handlers = {"worker": self.worker_event,
                                "task": self.task_event}
@@ -195,9 +216,10 @@ class State(object):
 
     def _clear_tasks(self, ready=True):
         if ready:
-            self.tasks = dict((uuid, task)
-                                for uuid, task in self.tasks.items()
-                                    if task.state not in states.READY_STATES)
+            in_progress = dict((uuid, task) for uuid, task in self.itertasks()
+                                if task.state not in states.READY_STATES)
+            self.tasks.clear()
+            self.tasks.update(in_progress)
         else:
             self.tasks.clear()
 
@@ -265,13 +287,19 @@ class State(object):
         if self.event_callback:
             self.event_callback(self, event)
 
+    def itertasks(self, limit=None):
+        for index, row in enumerate(self.tasks.iteritems()):
+            yield row
+            if limit and index >= limit:
+                break
+
     def tasks_by_timestamp(self, limit=None):
         """Get tasks by timestamp.
 
         Returns a list of `(uuid, task)` tuples.
 
         """
-        return self._sort_tasks_by_time(self.tasks.items()[:limit])
+        return self._sort_tasks_by_time(self.itertasks(limit))
 
     def _sort_tasks_by_time(self, tasks):
         """Sort task items by time."""
@@ -285,7 +313,7 @@ class State(object):
 
         """
         return self._sort_tasks_by_time([(uuid, task)
-                for uuid, task in self.tasks.items()[:limit]
+                for uuid, task in self.itertasks(limit)
                     if task.name == name])
 
     def tasks_by_worker(self, hostname, limit=None):
@@ -295,12 +323,12 @@ class State(object):
 
         """
         return self._sort_tasks_by_time([(uuid, task)
-                for uuid, task in self.tasks.items()[:limit]
+                for uuid, task in self.itertasks(limit)
                     if task.worker.hostname == hostname])
 
     def task_types(self):
         """Returns a list of all seen task types."""
-        return list(sorted(set(task.name for task in self.tasks.values())))
+        return list(sorted(set(task.name for task in self.tasks.itervalues())))
 
     def alive_workers(self):
         """Returns a list of (seemingly) alive workers."""

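``itertasks`` gives the ``tasks_by_*`` helpers a lazy, bounded walk over the task mapping, replacing the ``self.tasks.items()[:limit]`` slices that no longer make sense now that ``tasks`` is an ``LRUCache``. ``itertools.islice`` is a compact way to express the same idea (a sketch, not celery's code):

.. code-block:: python

    from itertools import islice

    def itertasks(tasks, limit=None):
        # Lazily yield at most `limit` (uuid, task) pairs from any
        # mapping, without materializing the whole item list first.
        return islice(tasks.items(), limit)   # stop=None means no bound

    print(list(itertasks({"a": 1, "b": 2, "c": 3}, limit=2)))
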
+ 29 - 0
celery/exceptions.py

@@ -1,3 +1,24 @@
+"""
+
+celery.exceptions
+=================
+
+This module contains Celery-specific exceptions.
+
+"""
+
+from __future__ import absolute_import
+
+__all__ = ["SystemTerminate", "QueueNotFound",
+           "TimeLimitExceeded", "SoftTimeLimitExceeded",
+           "WorkerLostError", "ImproperlyConfigured",
+           "NotRegistered", "AlreadyRegistered",
+           "TimeoutError", "MaxRetriesExceededError",
+           "RetryTaskError", "TaskRevokedError",
+           "NotConfigured", "CPendingDeprecationWarning",
+           "CDeprecationWarning"]
+
+
 UNREGISTERED_FMT = """\
 Task of kind %s is not registered, please make sure it's imported.\
 """
@@ -61,3 +82,11 @@ class TaskRevokedError(Exception):
 
 class NotConfigured(UserWarning):
     """Celery has not been configured, as no config module has been found."""
+
+
+class CPendingDeprecationWarning(PendingDeprecationWarning):
+    pass
+
+
+class CDeprecationWarning(DeprecationWarning):
+    pass

+ 3 - 3
celery/execute/__init__.py

@@ -1,7 +1,7 @@
 from __future__ import absolute_import
 
-from celery import current_app
-from celery.utils import deprecated
+from .. import current_app
+from ..utils import deprecated
 
 send_task = current_app.send_task
 
@@ -21,5 +21,5 @@ def apply(task, *args, **kwargs):
 @deprecated(removal="2.3",
             alternative="Use registry.tasks[name].delay instead.")
 def delay_task(task, *args, **kwargs):
-    from celery.registry import tasks
+    from ..registry import tasks
     return tasks[task].apply_async(args, kwargs)

+ 19 - 8
celery/execute/trace.py

@@ -1,13 +1,23 @@
+"""
+celery.execute.trace
+====================
+
+This module defines how the task execution is traced,
+errors are recorded, handlers are applied and so on.
+
+"""
+
 from __future__ import absolute_import
 
 import sys
 import traceback
 
-from celery import states
-from celery import signals
-from celery.registry import tasks
-from celery.exceptions import RetryTaskError
-from celery.datastructures import ExceptionInfo
+from .. import states, signals
+from ..datastructures import ExceptionInfo
+from ..exceptions import RetryTaskError
+from ..registry import tasks
+
+__all__ = ["TraceInfo", "TaskTrace"]
 
 
 class TraceInfo(object):
@@ -72,7 +82,7 @@ class TaskTrace(object):
 
     def execute(self):
         self.task.request.update(self.request, args=self.args,
-                                               kwargs=self.kwargs)
+                                 called_directly=False, kwargs=self.kwargs)
         signals.task_prerun.send(sender=self.task, task_id=self.task_id,
                                  task=self.task, args=self.args,
                                  kwargs=self.kwargs)
@@ -92,13 +102,14 @@ class TaskTrace(object):
         handler = self._trace_handlers[trace.status]
         r = handler(trace.retval, trace.exc_type, trace.tb, trace.strtb)
         self.handle_after_return(trace.status, trace.retval,
-                                 trace.exc_type, trace.tb, trace.strtb)
+                                 trace.exc_type, trace.tb, trace.strtb,
+                                 einfo=trace.exc_info)
         return r
 
     def handle_after_return(self, status, retval, type_, tb, strtb,
             einfo=None):
         if status in states.EXCEPTION_STATES:
-            einfo = ExceptionInfo((retval, type_, tb))
+            einfo = ExceptionInfo(einfo)
         self.task.after_return(status, retval, self.task_id,
                                self.args, self.kwargs, einfo)
 

+ 19 - 10
celery/loaders/__init__.py

@@ -1,27 +1,36 @@
-from __future__ import absolute_import
+"""
+
+celery.loaders
+==============
 
-import os
+Loaders define how configuration is read, what happens
+when workers start, when tasks are executed, and so on.
+
+"""
+from __future__ import absolute_import
 
-from celery import current_app
-from celery.utils import get_cls_by_name
+from .. import current_app
+from ..utils import deprecated, get_cls_by_name
 
 LOADER_ALIASES = {"app": "celery.loaders.app.AppLoader",
                   "default": "celery.loaders.default.Loader",
                   "django": "djcelery.loaders.DjangoLoader"}
 
+__all__ = ["get_loader_cls"]
+
 
 def get_loader_cls(loader):
     """Get loader class by name/alias"""
     return get_cls_by_name(loader, LOADER_ALIASES)
 
 
-def setup_loader():     # XXX Deprecate
-    return get_loader_cls(os.environ.setdefault("CELERY_LOADER", "default"))()
-
-
-def current_loader():   # XXX Deprecate
+@deprecated(deprecation="2.5", removal="3.0",
+        alternative="celery.current_app.loader")
+def current_loader():
     return current_app.loader
 
 
-def load_settings():    # XXX Deprecate
+@deprecated(deprecation="2.5", removal="3.0",
+            alternative="celery.current_app.conf")
+def load_settings():
     return current_app.conf

+ 11 - 1
celery/loaders/app.py

@@ -1,6 +1,16 @@
+"""
+
+celery.loaders.app
+==================
+
+The default loader used with custom app instances.
+
+"""
 from __future__ import absolute_import
 
-from celery.loaders.base import BaseLoader
+from .base import BaseLoader
+
+__all__ = ["AppLoader"]
 
 
 class AppLoader(BaseLoader):

+ 35 - 14
celery/loaders/base.py

@@ -1,3 +1,12 @@
+"""
+
+celery.loaders.base
+===================
+
+Loader base class.
+
+"""
+
 from __future__ import absolute_import
 
 import importlib
@@ -6,12 +15,14 @@ import re
 import warnings
 
 from anyjson import deserialize
-from kombu.utils import cached_property
 
-from celery.datastructures import DictAttribute
-from celery.exceptions import ImproperlyConfigured
-from celery.utils import get_cls_by_name
-from celery.utils import import_from_cwd as _import_from_cwd
+from ..datastructures import DictAttribute
+from ..exceptions import ImproperlyConfigured
+from ..utils import (cached_property, get_cls_by_name,
+                     import_from_cwd as _import_from_cwd)
+from ..utils.functional import maybe_list
+
+__all__ = ["BaseLoader"]
 
 BUILTIN_MODULES = frozenset(["celery.task"])
 
@@ -47,7 +58,7 @@ class BaseLoader(object):
     _conf = None
 
     def __init__(self, app=None, **kwargs):
-        from celery.app import app_or_default
+        from ..app import app_or_default
         self.app = app_or_default(app)
 
     def on_task_init(self, task_id, task):
@@ -63,18 +74,23 @@ class BaseLoader(object):
         starts."""
         pass
 
+    def on_worker_process_init(self):
+        """This method is called when a child process starts."""
+        pass
+
     def import_task_module(self, module):
         return self.import_from_cwd(module)
 
-    def import_module(self, module):
-        return importlib.import_module(module)
+    def import_module(self, module, package=None):
+        return importlib.import_module(module, package=package)
 
-    def import_from_cwd(self, module, imp=None):
+    def import_from_cwd(self, module, imp=None, package=None):
         return _import_from_cwd(module,
-                self.import_module if imp is None else imp)
+                self.import_module if imp is None else imp,
+                package=package)
 
     def import_default_modules(self):
-        imports = set(list(self.conf.get("CELERY_IMPORTS") or ()))
+        imports = set(maybe_list(self.conf.get("CELERY_IMPORTS") or ()))
         return [self.import_task_module(module)
                     for module in imports | self.builtin_modules]
 
@@ -83,6 +99,9 @@ class BaseLoader(object):
             self.worker_initialized = True
             self.on_worker_init()
 
+    def init_worker_process(self):
+        self.on_worker_process_init()
+
     def config_from_envvar(self, variable_name, silent=False):
         module_name = os.environ.get(variable_name)
         if not module_name:
@@ -113,7 +132,7 @@ class BaseLoader(object):
                 override_types={"tuple": "json",
                                 "list": "json",
                                 "dict": "json"}):
-        from celery.app.defaults import Option, NAMESPACES
+        from ..app.defaults import Option, NAMESPACES
         namespace = namespace.upper()
         typemap = dict(Option.typemap, **extra_types)
 
@@ -155,13 +174,15 @@ class BaseLoader(object):
 
     def mail_admins(self, subject, body, fail_silently=False,
             sender=None, to=None, host=None, port=None,
-            user=None, password=None, timeout=None, use_ssl=False):
+            user=None, password=None, timeout=None,
+            use_ssl=False, use_tls=False):
         try:
             message = self.mail.Message(sender=sender, to=to,
                                         subject=subject, body=body)
             mailer = self.mail.Mailer(host=host, port=port,
                                       user=user, password=password,
-                                      timeout=timeout, use_ssl=use_ssl)
+                                      timeout=timeout, use_ssl=use_ssl,
+                                      use_tls=use_tls)
             mailer.send(message)
         except Exception, exc:
             if not fail_silently:

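Wrapping ``CELERY_IMPORTS`` in ``maybe_list`` makes the setting forgiving: a single module name now behaves like a one-element list. A sketch of what such a helper does (celery's real one lives in ``celery.utils.functional``; the exact edge-case behavior here is assumed):

.. code-block:: python

    def maybe_list(value):
        # Normalize None, a scalar, or an iterable of module names
        # into a real list.
        if value is None:
            return []
        if isinstance(value, (list, tuple, set, frozenset)):
            return list(value)
        return [value]

    assert maybe_list("proj.tasks") == ["proj.tasks"]
    assert maybe_list(["a", "b"]) == ["a", "b"]
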
+ 15 - 4
celery/loaders/default.py

@@ -1,15 +1,26 @@
+"""
+
+celery.loaders.default
+======================
+
+The default loader used when no custom app has been initialized.
+
+"""
 from __future__ import absolute_import
 
 import os
 import warnings
 
-from celery.datastructures import AttributeDict
-from celery.exceptions import NotConfigured
-from celery.loaders.base import BaseLoader
-from celery.utils import find_module
+from ..datastructures import AttributeDict
+from ..exceptions import NotConfigured
+from ..utils import find_module
+
+from .base import BaseLoader
 
 DEFAULT_CONFIG_MODULE = "celeryconfig"
 
+__all__ = ["Loader"]
+
 
 class Loader(BaseLoader):
     """The loader used by the default app."""

+ 21 - 3
celery/local.py

@@ -1,4 +1,20 @@
+"""
+
+celery.local
+============
+
+This module contains critical utilities that
+need to be loaded as soon as possible, and that
+should not load any third party modules.
+
+"""
+
+from __future__ import absolute_import
+
+
 def try_import(module):
+    """Try to import and return module, or return
+    None if the module does not exist."""
     from importlib import import_module
     try:
         return import_module(module)
@@ -6,12 +22,14 @@ def try_import(module):
         pass
 
 
-class LocalProxy(object):
-    """Code stolen from werkzeug.local.LocalProxy."""
+class Proxy(object):
+    """Proxy to another object."""
+
+    # Code stolen from werkzeug.local.Proxy.
     __slots__ = ('__local', '__dict__', '__name__')
 
     def __init__(self, local, name=None):
-        object.__setattr__(self, '_LocalProxy__local', local)
+        object.__setattr__(self, '_Proxy__local', local)
         object.__setattr__(self, '__name__', name)
 
     def _get_current_object(self):

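The ``LocalProxy`` → ``Proxy`` rename matters below in ``celery/log.py`` and ``celery/messaging.py``, where module-level names become proxies that resolve ``current_app`` lazily on every access instead of binding it at import time. The core of such a proxy, reduced to a sketch:

.. code-block:: python

    class Proxy(object):
        """Forward attribute access to whatever `local()` returns *now*."""

        def __init__(self, local):
            object.__setattr__(self, "_Proxy__local", local)

        def _get_current_object(self):
            return self.__local()      # the name-mangled slot set above

        def __getattr__(self, name):
            return getattr(self._get_current_object(), name)


    class Config(object):
        debug = True

    cfg = Proxy(lambda: Config)        # resolved at *use* time
    assert cfg.debug
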
+ 50 - 30
celery/log.py

@@ -1,4 +1,11 @@
-"""celery.log"""
+"""
+
+celery.log
+==========
+
+Logging utilities.
+
+"""
 from __future__ import absolute_import
 
 import logging
@@ -12,14 +19,21 @@ try:
 except ImportError:
     current_process = mputil = None  # noqa
 
-from celery import signals
-from celery import current_app
-from celery.utils import LOG_LEVELS, isatty
-from celery.utils.compat import LoggerAdapter
-from celery.utils.compat import WatchedFileHandler
-from celery.utils.encoding import safe_str
-from celery.utils.patch import ensure_process_aware_logger
-from celery.utils.term import colored
+from . import current_app
+from . import signals
+from .local import Proxy
+from .utils import LOG_LEVELS, isatty
+from .utils.compat import LoggerAdapter, WatchedFileHandler
+from .utils.encoding import safe_str
+from .utils.patch import ensure_process_aware_logger
+from .utils.term import colored
+
+is_py3k = sys.version_info >= (3, 0)
+
+__all__ = ["ColorFormatter", "Logging", "get_default_logger",
+           "setup_logger", "setup_task_logger", "get_task_logger",
+           "setup_logging_subsystem", "redirect_stdouts_to_logger",
+           "LoggingProxy"]
 
 
 class ColorFormatter(logging.Formatter):
@@ -34,8 +48,8 @@ class ColorFormatter(logging.Formatter):
 
     def formatException(self, ei):
         r = logging.Formatter.formatException(self, ei)
-        if isinstance(r, str):
-            return r.decode("utf-8", "replace")    # Convert to unicode
+        if isinstance(r, str) and not is_py3k:
+            return safe_str(r)
         return r
 
     def format(self, record):
@@ -50,16 +64,15 @@ class ColorFormatter(logging.Formatter):
                         type(record.msg), exc)
                 record.exc_info = sys.exc_info()
 
-        # Very ugly, but have to make sure processName is supported
-        # by foreign logger instances.
-        # (processName is always supported by Python 2.7)
-        if "processName" not in record.__dict__:
-            process_name = current_process and current_process()._name or ""
-            record.__dict__["processName"] = process_name
-        t = logging.Formatter.format(self, record)
-        if isinstance(t, unicode):
-            return t.encode("utf-8", "replace")
-        return t
+        if not is_py3k:
+            # Very ugly, but have to make sure processName is supported
+            # by foreign logger instances.
+            # (processName is always supported by Python 2.7)
+            if "processName" not in record.__dict__:
+                process_name = (current_process and
+                                current_process()._name or "")
+                record.__dict__["processName"] = process_name
+        return safe_str(logging.Formatter.format(self, record))
 
 
 class Logging(object):
@@ -105,7 +118,8 @@ class Logging(object):
 
         if mputil and hasattr(mputil, "_logger"):
             mputil._logger = None
-        ensure_process_aware_logger()
+        if not is_py3k:
+            ensure_process_aware_logger()
         receivers = signals.setup_logging.send(sender=None,
                         loglevel=loglevel, logfile=logfile,
                         format=format, colorize=colorize)
@@ -192,7 +206,8 @@ class Logging(object):
         return LoggerAdapter(logger, {"task_id": task_id,
                                       "task_name": task_name})
 
-    def redirect_stdouts_to_logger(self, logger, loglevel=None):
+    def redirect_stdouts_to_logger(self, logger, loglevel=None,
+            stdout=True, stderr=True):
         """Redirect :class:`sys.stdout` and :class:`sys.stderr` to a
         logging instance.
 
@@ -201,7 +216,10 @@ class Logging(object):
 
         """
         proxy = LoggingProxy(logger, loglevel)
-        sys.stdout = sys.stderr = proxy
+        if stdout:
+            sys.stdout = sys.__stdout__ = proxy
+        if stderr:
+            sys.stderr = sys.__stderr__ = proxy
         return proxy
 
     def _setup_logger(self, logger, logfile, format, colorize,
@@ -216,12 +234,14 @@ class Logging(object):
         return logger
 
 
-setup_logging_subsystem = current_app.log.setup_logging_subsystem
-get_default_logger = current_app.log.get_default_logger
-setup_logger = current_app.log.setup_logger
-setup_task_logger = current_app.log.setup_task_logger
-get_task_logger = current_app.log.get_task_logger
-redirect_stdouts_to_logger = current_app.log.redirect_stdouts_to_logger
+get_default_logger = Proxy(lambda: current_app.log.get_default_logger)
+setup_logger = Proxy(lambda: current_app.log.setup_logger)
+setup_task_logger = Proxy(lambda: current_app.log.setup_task_logger)
+get_task_logger = Proxy(lambda: current_app.log.get_task_logger)
+setup_logging_subsystem = Proxy(
+            lambda: current_app.log.setup_logging_subsystem)
+redirect_stdouts_to_logger = Proxy(
+            lambda: current_app.log.redirect_stdouts_to_logger)
 
 
 class LoggingProxy(object):

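``redirect_stdouts_to_logger`` now lets callers redirect stdout and stderr independently. The underlying trick is a file-like proxy whose ``write`` calls turn into log records; a self-contained miniature (not the real ``LoggingProxy``):

.. code-block:: python

    import logging
    import sys

    class StdoutLogger(object):
        # Minimal file-like object: every write becomes a log record.
        def __init__(self, logger, loglevel=logging.INFO):
            self.logger, self.loglevel = logger, loglevel

        def write(self, data):
            data = data.strip()
            if data:
                self.logger.log(self.loglevel, data)

        def flush(self):               # print() may call this
            pass

    logging.basicConfig(level=logging.INFO)
    sys.stdout = StdoutLogger(logging.getLogger("stdout"))
    print("hello")                     # emitted via logging, on stderr
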
+ 19 - 8
celery/messaging.py

@@ -1,8 +1,19 @@
-from celery import current_app
-
-TaskPublisher = current_app.amqp.TaskPublisher
-ConsumerSet = current_app.amqp.ConsumerSet
-TaskConsumer = current_app.amqp.TaskConsumer
-establish_connection = current_app.broker_connection
-with_connection = current_app.with_default_connection
-get_consumer_set = current_app.amqp.get_task_consumer
+"""
+
+celery.messaging
+================
+
+This module is deprecated, use ``current_app.amqp`` instead.
+
+"""
+from __future__ import absolute_import
+
+from . import current_app
+from .local import Proxy
+
+TaskPublisher = Proxy(lambda: current_app.amqp.TaskPublisher)
+ConsumerSet = Proxy(lambda: current_app.amqp.ConsumerSet)
+TaskConsumer = Proxy(lambda: current_app.amqp.TaskConsumer)
+establish_connection = Proxy(lambda: current_app.broker_connection)
+with_connection = Proxy(lambda: current_app.with_default_connection)
+get_consumer_set = Proxy(lambda: current_app.amqp.get_task_consumer)

+ 177 - 23
celery/platforms.py

@@ -1,27 +1,69 @@
+"""
+
+celery.platforms
+================
+
+Utilities dealing with platform specifics: signals, daemonization, users &
+groups, etc.
+
+"""
 from __future__ import absolute_import
 
-import os
-import sys
 import errno
+import os
+import platform as _platform
+import shlex
 import signal as _signal
+import sys
 
-from celery.local import try_import
+from .local import try_import
 
 _setproctitle = try_import("setproctitle")
 resource = try_import("resource")
 pwd = try_import("pwd")
 grp = try_import("grp")
 
+SYSTEM = _platform.system()
+IS_OSX = SYSTEM == "Darwin"
+IS_WINDOWS = SYSTEM == "Windows"
+
 DAEMON_UMASK = 0
 DAEMON_WORKDIR = "/"
 DAEMON_REDIRECT_TO = getattr(os, "devnull", "/dev/null")
 
+__all__ = ["LockFailed", "get_fdmax", "create_pidlock",
+           "DaemonContext", "detached", "parse_uid", "parse_gid",
+           "setegid", "seteuid", "set_effective_user", "Signals",
+           "set_process_title", "set_mp_process_title", "pyimplementation"]
+
+
+def pyimplementation():
+    if hasattr(_platform, "python_implementation"):
+        return _platform.python_implementation()
+    elif sys.platform.startswith("java"):
+        return "Jython %s" % (sys.platform, )
+    elif hasattr(sys, "pypy_version_info"):
+        v = ".".join(map(str, sys.pypy_version_info[:3]))
+        if sys.pypy_version_info[3:]:
+            v += "-" + "".join(map(str, sys.pypy_version_info[3:]))
+        return "PyPy %s" % (v, )
+    else:
+        return "CPython"
+
 
 class LockFailed(Exception):
+    """Raised if a pidlock can't be acquired."""
     pass
 
 
 def get_fdmax(default=None):
+    """Returns the maximum number of open file descriptors
+    on this system.
+
+    :keyword default: Value returned if there's no file
+                      descriptor limit.
+
+    """
     fdmax = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
     if fdmax == resource.RLIM_INFINITY:
         return default
@@ -29,22 +71,23 @@ def get_fdmax(default=None):
 
 
 class PIDFile(object):
+    """PID lock file.
+
+    This is the type returned by :func:`create_pidlock`.
+
+    **Should not be used directly, use the :func:`create_pidlock`
+    context instead**
+
+    """
+
+    #: Path to the pid lock file.
+    path = None
 
     def __init__(self, path):
         self.path = os.path.abspath(path)
 
-    def write_pid(self):
-        open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
-        open_mode = (((os.R_OK | os.W_OK) << 6) |
-                        ((os.R_OK) << 3) |
-                        ((os.R_OK)))
-        pidfile_fd = os.open(self.path, open_flags, open_mode)
-        pidfile = os.fdopen(pidfile_fd, "w")
-        pid = os.getpid()
-        pidfile.write("%d\n" % (pid, ))
-        pidfile.close()
-
     def acquire(self):
+        """Acquire lock."""
         try:
             self.write_pid()
         except OSError, exc:
@@ -53,13 +96,16 @@ class PIDFile(object):
     __enter__ = acquire
 
     def is_locked(self):
+        """Returns true if the pid lock exists."""
         return os.path.exists(self.path)
 
     def release(self, *args):
+        """Release lock."""
         self.remove()
     __exit__ = release
 
     def read_pid(self):
+        """Reads and returns the current pid."""
         try:
             fh = open(self.path, "r")
         except IOError, exc:
@@ -76,6 +122,7 @@ class PIDFile(object):
             raise ValueError("PID file %r contents invalid." % self.path)
 
     def remove(self):
+        """Removes the lock."""
         try:
             os.unlink(self.path)
         except OSError, exc:
@@ -84,6 +131,8 @@ class PIDFile(object):
             raise
 
     def remove_if_stale(self):
+        """Removes the lock if the process is not running.
+        (does not respond to signals)."""
         try:
             pid = self.read_pid()
         except ValueError, exc:
@@ -103,13 +152,39 @@ class PIDFile(object):
                 return True
         return False
 
+    def write_pid(self):
+        open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+        open_mode = (((os.R_OK | os.W_OK) << 6) |
+                     (os.R_OK << 3) |
+                     os.R_OK)
+        pidfile_fd = os.open(self.path, open_flags, open_mode)
+        pidfile = os.fdopen(pidfile_fd, "w")
+        try:
+            pid = os.getpid()
+            pidfile.write("%d\n" % (pid, ))
+        finally:
+            pidfile.close()
+
 
 def create_pidlock(pidfile):
-    """Create and verify pidfile.
+    """Create and verify pid file.
+
+    If the pid file already exists the program exits with an error message;
+    however, if the process it refers to is no longer running, the pid file
+    is deleted and the program continues.
+
+    The caller is responsible for releasing the lock before the program
+    exits.
+
+    :returns: :class:`PIDFile`.
 
-    If the pidfile already exists the program exits with an error message,
-    however if the process it refers to is not running anymore, the pidfile
-    is just deleted.
+    **Example**:
+
+    .. code-block:: python
+
+        import atexit
+        pidlock = create_pidlock("/var/run/app.pid").acquire()
+        atexit.register(pidlock.release)
 
     """
 
@@ -168,6 +243,41 @@ class DaemonContext(object):
 
 def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
              workdir=None, **opts):
+    """Detach the current process in the background (daemonize).
+
+    :keyword logfile: Optional log file.  The ability to write to this file
+       will be verified before the process is detached.
+    :keyword pidfile: Optional pid file.  The pid file will not be created,
+      as this is the responsibility of the child.  But the process will
+      exit if the pid lock exists and the process it refers to is still
+      running.
+    :keyword uid: Optional user id or user name to change
+      effective privileges to.
+    :keyword gid: Optional group id or group name to change effective
+      privileges to.
+    :keyword umask: Optional umask that will be effective in the child process.
+    :keyword workdir: Optional new working directory.
+    :keyword \*\*opts: Ignored.
+
+    **Example**:
+
+    .. code-block:: python
+
+        import atexit
+        from celery.platforms import detached, create_pidlock
+
+        with detached(logfile="/var/log/app.log", pidfile="/var/run/app.pid",
+                      uid="nobody"):
+            # Now in detached child process with effective user set to nobody,
+            # and we know that our logfile can be written to, and that
+            # the pidfile is not locked.
+            pidlock = create_pidlock("/var/run/app.pid").acquire()
+            atexit.register(pidlock.release)
+
+            # Run the program
+            program.run(logfile="/var/log/app.log")
+
+    """
+
     if not resource:
         raise RuntimeError("This platform does not support detach.")
     workdir = os.getcwd() if workdir is None else workdir
@@ -187,7 +297,7 @@ def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
 def parse_uid(uid):
     """Parse user id.
 
-    uid can be an interger (uid) or a string (username), if a username
+    uid can be an integer (uid) or a string (user name); if a user name,
     the uid is taken from the password file.
 
     """
@@ -237,19 +347,19 @@ def seteuid(uid):
 def set_effective_user(uid=None, gid=None):
     """Change process privileges to new user/group.
 
-    If uid and gid is set the effective user/group is set.
+    If both UID and GID are set, the effective user/group is set.
 
-    If only uid is set, the effective uer is set, and the group is
+    If only UID is set, the effective user is set, and the group is
     set to the user's primary group.
 
-    If only gid is set, the effective group is set.
+    If only GID is set, the effective group is set.
 
     """
     uid = uid and parse_uid(uid)
     gid = gid and parse_gid(gid)
 
     if uid:
-        # If gid isn't defined, get the primary gid of the uer.
+        # If GID isn't defined, get the primary GID of the user.
         if not gid and pwd:
             gid = pwd.getpwuid(uid).pw_gid
         setegid(gid)
@@ -259,6 +369,42 @@ def set_effective_user(uid=None, gid=None):
 
 
 class Signals(object):
+    """Convenience interface to :mod:`signals`.
+
+    If the requested signal is not supported on the current platform,
+    the operation will be ignored.
+
+    **Examples**:
+
+    .. code-block:: python
+
+        >>> from celery.platforms import signals
+
+        >>> signals["INT"] = my_handler
+
+        >>> signals["INT"]
+        my_handler
+
+        >>> signals.supported("INT")
+        True
+
+        >>> signals.signum("INT")
+        2
+
+        >>> signals.ignore("USR1")
+        >>> signals["USR1"] == signals.ignored
+        True
+
+        >>> signals.reset("USR1")
+        >>> signals["USR1"] == signals.default
+        True
+
+        >>> signals.update(INT=exit_handler,
+        ...                TERM=exit_handler,
+        ...                HUP=hup_handler)
+
+    """
+
     ignored = _signal.SIG_IGN
     default = _signal.SIG_DFL
 
@@ -361,3 +507,11 @@ def set_mp_process_title(progname, info=None, hostname=None):
     else:
         return set_process_title("%s:%s" % (progname,
                                             current_process().name), info=info)
+
+
+def shellsplit(s, posix=True):
+    # The posix= option to shlex.split was first available in Python 2.6,
+    # so emulate it here.  POSIX quoting rules are always off on Windows.
+    lexer = shlex.shlex(s, posix=posix and not IS_WINDOWS)
+    lexer.whitespace_split = True
+    lexer.commenters = ''
+    return list(lexer)
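
A minimal sketch of the new ``shellsplit()`` helper: with POSIX rules in
effect (any non-Windows platform), quotes are consumed rather than kept.

.. code-block:: python

    >>> from celery.platforms import shellsplit
    >>> shellsplit("celeryd --loglevel='INFO' -n worker1")
    ['celeryd', '--loglevel=INFO', '-n', 'worker1']
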

+ 11 - 2
celery/registry.py

@@ -1,9 +1,18 @@
-"""celery.registry"""
+"""
+
+celery.registry
+===============
+
+Registry of available tasks.
+
+"""
 from __future__ import absolute_import
 
 import inspect
 
-from celery.exceptions import NotRegistered
+from .exceptions import NotRegistered
+
+__all__ = ["TaskRegistry", "tasks"]
 
 
 class TaskRegistry(dict):

+ 25 - 10
celery/result.py

@@ -1,3 +1,10 @@
+"""
+celery.result
+=============
+
+Task results/state, and result groups.
+
+"""
 from __future__ import absolute_import
 from __future__ import with_statement
 
@@ -6,11 +13,15 @@ import time
 from copy import copy
 from itertools import imap
 
-from celery import current_app
-from celery import states
-from celery.app import app_or_default
-from celery.exceptions import TimeoutError
-from celery.registry import _unpickle_task
+from . import current_app
+from . import states
+from .app import app_or_default
+from .exceptions import TimeoutError
+from .registry import _unpickle_task
+from .utils.compat import OrderedDict
+
+__all__ = ["BaseAsyncResult", "AsyncResult", "ResultSet",
+           "TaskSetResult", "EagerResult"]
 
 
 def _unpickle_result(task_id, task_name):
@@ -327,15 +338,19 @@ class ResultSet(object):
 
         """
         elapsed = 0.0
-        results = dict((result.task_id, copy(result))
-                            for result in self.results)
+        results = OrderedDict((result.task_id, copy(result))
+                                for result in self.results)
 
         while results:
             removed = set()
             for task_id, result in results.iteritems():
-                yield result.get(timeout=timeout and timeout - elapsed,
-                                 propagate=propagate, interval=0.0)
-                removed.add(task_id)
+                if result.ready():
+                    yield result.get(timeout=timeout and timeout - elapsed,
+                                     propagate=propagate)
+                    removed.add(task_id)
+                else:
+                    if result.backend.subpolling_interval:
+                        time.sleep(result.backend.subpolling_interval)
             for task_id in removed:
                 results.pop(task_id, None)
             time.sleep(interval)
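
For context, a hedged sketch of how the reworked ``iterate()`` is
typically driven; the ``add`` task and the broker setup are assumed, not
part of this change.

.. code-block:: python

    from celery.task import TaskSet
    from tasks import add  # hypothetical task module

    result = TaskSet(add.subtask((i, i)) for i in range(10)).apply_async()
    for retval in result.iterate():  # now yields in submission order
        print(retval)
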

+ 13 - 3
celery/routes.py

@@ -1,7 +1,17 @@
+"""
+celery.routes
+=============
+
+Contains utilities for working with task routes (e.g.
+:setting:`CELERY_ROUTES`).
+
+"""
 from __future__ import absolute_import
 
-from celery.exceptions import QueueNotFound
-from celery.utils import firstmethod, instantiate, lpmerge, mpromise
+from .exceptions import QueueNotFound
+from .utils import firstmethod, instantiate, lpmerge, mpromise
+
+_all__ = ["MapRoute", "Router", "prepare"]
 
 _first_route = firstmethod("route_for_task")
 
@@ -22,7 +32,7 @@ class Router(object):
 
     def __init__(self, routes=None, queues=None, create_missing=False,
             app=None):
-        from celery.app import app_or_default
+        from .app import app_or_default
         self.app = app_or_default(app)
         self.queues = {} if queues is None else queues
         self.routes = [] if routes is None else routes
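
A hedged sketch of the route formats the Router accepts (queue and
router names are hypothetical):

.. code-block:: python

    CELERY_ROUTES = (
        # A plain mapping is wrapped in a MapRoute automatically:
        {"feed.tasks.import_feed": {"queue": "feeds"}},
        # Any object (or dotted path to one) providing a
        # route_for_task() method also works:
        "myapp.routers.MyRouter",
    )
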

+ 75 - 62
celery/schedules.py

@@ -1,13 +1,27 @@
+"""
+celery.schedules
+================
+
+Schedules define when periodic tasks should be run.
+
+"""
+
 from __future__ import absolute_import
 
+import re
+
 from datetime import datetime, timedelta
 from dateutil.relativedelta import relativedelta
-from pyparsing import (Word, Literal, ZeroOrMore, Optional,
-                       Group, StringEnd, alphas)
 
-from celery.utils import is_iterable
-from celery.utils.timeutils import (timedelta_seconds, weekday,
-                                    remaining, humanize_seconds)
+from .utils import is_iterable
+from .utils.timeutils import (timedelta_seconds, weekday,
+                              remaining, humanize_seconds)
+
+__all__ = ["ParseException", "schedule", "crontab", "maybe_schedule"]
+
+
+class ParseException(Exception):
+    """Raised by crontab_parser when the input can't be parsed."""
 
 
 class schedule(object):
@@ -50,8 +64,8 @@ class schedule(object):
         return False, rem
 
     def __repr__(self):
-        return "<freq: %s>" % humanize_seconds(
-                timedelta_seconds(self.run_every))
+        return "<freq: %s>" % (
+                    humanize_seconds(timedelta_seconds(self.run_every)), )
 
     def __eq__(self, other):
         if isinstance(other, schedule):
@@ -85,70 +99,69 @@ class crontab_parser(object):
         [0, 1, 2, 3, 4, 5, 6]
 
     """
+    ParseException = ParseException
 
-    def __init__(self, max_=60):
-        # define the grammar structure
-        digits = "0123456789"
-        star = Literal('*')
-        number = Word(digits) | Word(alphas)
-        steps = number
-        range_ = number + Optional(Literal('-') + number)
-        numspec = star | range_
-        expr = Group(numspec) + Optional(Literal('/') + steps)
-        extra_groups = ZeroOrMore(Literal(',') + expr)
-        groups = expr + extra_groups + StringEnd()
-
-        # define parse actions
-        star.setParseAction(self._expand_star)
-        number.setParseAction(self._expand_number)
-        range_.setParseAction(self._expand_range)
-        expr.setParseAction(self._filter_steps)
-        extra_groups.setParseAction(self._ignore_comma)
-        groups.setParseAction(self._join_to_set)
+    _range = r'(\w+?)-(\w+)'
+    _steps = r'/(\w+)?'
+    _star = r'\*'
 
+    def __init__(self, max_=60):
         self.max_ = max_
-        self.parser = groups
-
-    @staticmethod
-    def _expand_number(toks):
-        try:
-            i = int(toks[0])
-        except ValueError:
-            try:
-                i = weekday(toks[0])
-            except KeyError:
-                raise ValueError("Invalid weekday literal '%s'." % toks[0])
-        return [i]
-
-    @staticmethod
-    def _expand_range(toks):
+        self.pats = (
+                (re.compile(self._range + self._steps), self._range_steps),
+                (re.compile(self._range), self._expand_range),
+                (re.compile(self._star + self._steps), self._star_steps),
+                (re.compile('^' + self._star + '$'), self._expand_star))
+
+    def parse(self, spec):
+        acc = set()
+        for part in spec.split(','):
+            if not part:
+                raise self.ParseException("empty part")
+            acc |= set(self._parse_part(part))
+        return acc
+
+    def _parse_part(self, part):
+        for regex, handler in self.pats:
+            m = regex.match(part)
+            if m:
+                return handler(m.groups())
+        return self._expand_range((part, ))
+
+    def _expand_range(self, toks):
+        fr = self._expand_number(toks[0])
         if len(toks) > 1:
-            return range(toks[0], int(toks[2]) + 1)
-        else:
-            return toks[0]
+            to = self._expand_number(toks[1])
+            return range(fr, min(to + 1, self.max_ + 1))
+        return [fr]
 
-    def _expand_star(self, toks):
-        return range(self.max_)
+    def _range_steps(self, toks):
+        if len(toks) != 3 or not toks[2]:
+            raise self.ParseException("empty filter")
+        return self._filter_steps(self._expand_range(toks[:2]), int(toks[2]))
 
-    @staticmethod
-    def _filter_steps(toks):
-        numbers = toks[0]
-        if len(toks) > 1:
-            steps = toks[2]
-            return [n for n in numbers if n % steps == 0]
-        else:
-            return numbers
+    def _star_steps(self, toks):
+        if not toks or not toks[0]:
+            raise self.ParseException("empty filter")
+        return self._filter_steps(self._expand_star(), int(toks[0]))
 
-    @staticmethod
-    def _ignore_comma(toks):
-        return [x for x in toks if x != ',']
+    def _filter_steps(self, numbers, steps):
+        return [n for n in numbers if n % steps == 0]
 
-    @staticmethod
-    def _join_to_set(toks):
-        return set(toks.asList())
+    def _expand_star(self, *args):
+        return range(self.max_)
 
-    def parse(self, cronspec):
-        return self.parser.parseString(cronspec).pop()
+    def _expand_number(self, s):
+        if isinstance(s, basestring) and s[0] == '-':
+            raise self.ParseException("negative numbers not supported")
+        try:
+            i = int(s)
+        except ValueError:
+            try:
+                i = weekday(s)
+            except KeyError:
+                raise ValueError("Invalid weekday literal '%s'." % s)
+        return i
 
 
 class crontab(schedule):
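
The regex-based parser stays drop-in compatible with the old pyparsing
grammar; a quick sketch of the expression forms it supports:

.. code-block:: python

    >>> from celery.schedules import crontab_parser
    >>> crontab_parser(7).parse("*")          # expand a star
    set([0, 1, 2, 3, 4, 5, 6])
    >>> crontab_parser(60).parse("1-10/2")    # range with a step filter
    set([2, 4, 6, 8, 10])
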

+ 8 - 8
celery/signals.py

@@ -1,4 +1,5 @@
 """
+
 ==============
 celery.signals
 ==============
@@ -6,9 +7,6 @@ celery.signals
 Signals allow decoupled applications to receive notifications when
 certain actions occur elsewhere in the application.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
-
 .. contents::
     :local:
 
@@ -221,7 +219,7 @@ eventlet_pool_started
 
 Sent when the eventlet pool has been started.
 
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
+Sender is the :class:`celery.concurrency.eventlet.TaskPool` instance.
 
 .. signal:: eventlet_pool_preshutdown
 
@@ -231,7 +229,7 @@ eventlet_pool_preshutdown
 Sent when the worker is shutting down, just before the eventlet pool
 is requested to wait for remaining workers.
 
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
+Sender is the :class:`celery.concurrency.eventlet.TaskPool` instance.
 
 .. signal:: eventlet_pool_postshutdown
 
@@ -240,7 +238,7 @@ eventlet_pool_postshutdown
 
 Sent when the pool has been joined and the worker is ready to shutdown.
 
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
+Sender is the :class:`celery.concurrency.eventlet.TaskPool` instance.
 
 .. signal:: eventlet_pool_apply
 
@@ -249,7 +247,7 @@ eventlet_pool_apply
 
 Sent whenever a task is applied to the pool.
 
-Sender is the :class:`celery.concurrency.evlet.TaskPool` instance.
+Sender is the :class:`celery.concurrency.eventlet.TaskPool` instance.
 
 Provides arguments:
 
@@ -347,7 +345,9 @@ Provides arguments:
 
 
 """
-from celery.utils.dispatch import Signal
+from __future__ import absolute_import
+
+from .utils.dispatch import Signal
 
 task_sent = Signal(providing_args=["task_id", "task",
                                    "args", "kwargs",

+ 3 - 4
celery/states.py

@@ -4,10 +4,6 @@ celery.states
 
 Built-in Task States.
 
-:copyright: (c) 2009 - 2011 by Ask Solem.
-:license: BSD, see LICENSE for more details.
-
-
 .. _states:
 
 States
@@ -59,6 +55,9 @@ Misc.
 -----
 
 """
+from __future__ import absolute_import
+
+__all__ = ["precedence", "state"]
 
 #: State precedence.
 #: None represents the precedence of an unknown state.

+ 21 - 7
celery/task/__init__.py

@@ -1,14 +1,28 @@
 # -*- coding: utf-8 -*-
+"""
+
+celery.task
+===========
+
+Creating tasks and subtasks.
+
+"""
+from __future__ import absolute_import
+
 import warnings
 
-from celery.app import app_or_default
-from celery.task.base import Task, PeriodicTask
-from celery.task.sets import TaskSet, subtask
-from celery.task.chords import chord
-from celery.task.control import discard_all
+from ..app import app_or_default
+from ..exceptions import CDeprecationWarning
+
+from .base import Task, PeriodicTask
+from .sets import TaskSet, subtask
+from .chords import chord
+from .control import discard_all
 
 __all__ = ["Task", "TaskSet", "PeriodicTask", "subtask",
-           "discard_all", "chord"]
+           "discard_all", "chord", "group"]
+
+group = TaskSet
 
 
 def task(*args, **kwargs):
@@ -95,7 +109,7 @@ def ping():  # ✞
     Please use :meth:`celery.task.control.ping` instead.
 
     """
-    warnings.warn(DeprecationWarning(
+    warnings.warn(CDeprecationWarning(
         "The ping task has been deprecated and will be removed in Celery "
         "v2.3.  Please use inspect.ping instead."))
     return PingTask.apply_async().get()
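
``group`` is introduced above as a forward-compatible alias for
``TaskSet``; a hedged sketch (the ``add`` task is hypothetical):

.. code-block:: python

    from celery.task import group
    from tasks import add  # hypothetical task module

    result = group(add.subtask((i, i)) for i in range(4)).apply_async()
    result.join()  # [0, 2, 4, 6]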

+ 16 - 5
celery/task/base.py

@@ -1,8 +1,19 @@
-from celery import current_app
-from celery.app.task import Context, TaskType, BaseTask  # noqa
-from celery.schedules import maybe_schedule
-from celery.utils import deprecated
-from celery.utils import timeutils
+"""
+
+celery.task.base
+================
+
+The task implementation has been moved to :mod:`celery.app.task`.
+
+"""
+from __future__ import absolute_import
+
+from .. import current_app
+from ..app.task import Context, TaskType, BaseTask  # noqa
+from ..schedules import maybe_schedule
+from ..utils import deprecated, timeutils
+
+__all__ = ["Task", "PeriodicTask"]
 
 Task = current_app.Task
 

+ 22 - 10
celery/task/chords.py

@@ -1,8 +1,20 @@
-from kombu.utils import gen_unique_id
+"""
 
-from celery import current_app
-from celery.result import TaskSetResult
-from celery.task.sets import TaskSet, subtask
+celery.task.chords
+==================
+
+Task chords (task set callbacks).
+
+"""
+from __future__ import absolute_import
+
+from .. import current_app
+from ..result import TaskSetResult
+from ..utils import uuid
+
+from .sets import TaskSet, subtask
+
+__all__ = ["Chord", "chord"]
 
 
 @current_app.task(name="celery.chord_unlock", max_retries=None)
@@ -25,11 +37,11 @@ class Chord(current_app.Task):
         if not isinstance(set, TaskSet):
             set = TaskSet(set)
         r = []
-        setid = gen_unique_id()
+        setid = uuid()
         for task in set.tasks:
-            uuid = gen_unique_id()
-            task.options.update(task_id=uuid, chord=body)
-            r.append(current_app.AsyncResult(uuid))
+            tid = uuid()
+            task.options.update(task_id=tid, chord=body)
+            r.append(current_app.AsyncResult(tid))
         current_app.TaskSetResult(setid, r).save()
         self.backend.on_chord_apply(setid, body, interval,
                                     max_retries=max_retries,
@@ -45,7 +57,7 @@ class chord(object):
         self.options = options
 
     def __call__(self, body, **options):
-        uuid = body.options.setdefault("task_id", gen_unique_id())
+        tid = body.options.setdefault("task_id", uuid())
         self.Chord.apply_async((list(self.tasks), body), self.options,
                                 **options)
-        return body.type.app.AsyncResult(uuid)
+        return body.type.app.AsyncResult(tid)
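
A hedged sketch of applying a chord; ``add`` and ``tsum`` are
hypothetical tasks, where ``tsum`` sums a list of numbers.

.. code-block:: python

    from celery.task import chord
    from tasks import add, tsum  # hypothetical tasks

    # The header tasks run in parallel; the list of their results is
    # then passed as the argument to the body task.
    result = chord(add.subtask((i, i)) for i in range(10))(tsum.subtask())
    result.get()  # == tsum([0, 2, 4, ..., 18]) == 90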

+ 20 - 7
celery/task/control.py

@@ -1,9 +1,22 @@
+"""
+
+celery.task.control
+===================
+
+The worker remote control command client.
+For the server implementation see :mod:`celery.worker.control`.
+
+"""
 from __future__ import absolute_import
 from __future__ import with_statement
 
 from kombu.pidbox import Mailbox
 
-from celery.app import app_or_default
+from ..app import app_or_default
+
+__all__ = ["flatten_reply", "Inspect", "Control",
+           "broadcast", "rate_limit", "time_limit", "ping", "revoke",
+           "discard_all", "inspect"]
 
 
 def flatten_reply(reply):
@@ -52,7 +65,7 @@ class Inspect(object):
     def revoked(self):
         return self._request("dump_revoked")
 
-    def registered_tasks(self):
+    def registered(self):
         return self._request("dump_tasks")
 
     def enable_events(self):
@@ -76,6 +89,8 @@ class Inspect(object):
     def active_queues(self):
         return self._request("active_queues")
 
+    registered_tasks = registered
+
 
 class Control(object):
     Mailbox = Mailbox
@@ -98,8 +113,8 @@ class Control(object):
 
         """
         with self.app.default_connection(connection, connect_timeout) as conn:
-            with self.app.amqp.get_task_consumer(connection=conn) as consumer:
-                return consumer.discard_all()
+            return self.app.amqp.get_task_consumer(connection=conn)\
+                                .discard_all()
 
     def revoke(self, task_id, destination=None, terminate=False,
             signal="SIGTERM", **kwargs):
@@ -209,9 +224,7 @@ class Control(object):
         """
         with self.app.default_connection(connection, connect_timeout) as conn:
             if channel is None:
-                if not getattr(conn, "_producer_chan", None):
-                    conn._producer_chan = conn.channel()
-                channel = conn._producer_chan
+                channel = conn.default_channel
             return self.mailbox(conn)._broadcast(command, arguments,
                                                  destination, reply, timeout,
                                                  limit, callback,
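
Note the rename above: ``Inspect.registered_tasks`` becomes
``Inspect.registered``, with the old name kept as an alias.  A quick
sketch:

.. code-block:: python

    >>> from celery.task.control import inspect
    >>> i = inspect()         # inspect all available worker nodes
    >>> i.registered()        # the new canonical name
    >>> i.registered_tasks()  # backwards-compatible alias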

+ 29 - 7
celery/task/http.py

@@ -1,3 +1,14 @@
+"""
+
+celery.task.http
+================
+
+Webhook tasks.
+
+"""
+from __future__ import absolute_import
+
+import sys
 import urllib2
 
 from urllib import urlencode
@@ -9,11 +20,14 @@ except ImportError:  # pragma: no cover
 
 from anyjson import deserialize
 
-from celery import __version__ as celery_version
-from celery.task.base import Task as BaseTask
+from .. import __version__ as celery_version
+from .base import Task as BaseTask
 
 GET_METHODS = frozenset(["GET", "HEAD"])
 
+__all__ = ["InvalidResponseError", "RemoteExecuteError", "UnknownStatusError",
+           "MutableURL", "HttpDispatch", "HttpDispatchTask", "URL"]
+
 
 class InvalidResponseError(Exception):
     """The remote server gave an invalid response."""
@@ -34,11 +48,19 @@ def maybe_utf8(value):
     return value
 
 
-def utf8dict(tup):
-    """With a dict's items() tuple return a new dict with any utf-8
-    keys/values encoded."""
-    return dict((key.encode("utf-8"), maybe_utf8(value))
-                    for key, value in tup)
+if sys.version_info >= (3, 0):
+
+    def utf8dict(tup):
+        if not isinstance(tup, dict):
+            return dict(tup)
+        return tup
+else:
+
+    def utf8dict(tup):  # noqa
+        """With a dict's items() tuple return a new dict with any utf-8
+        keys/values encoded."""
+        return dict((key.encode("utf-8"), maybe_utf8(value))
+                        for key, value in tup)
 
 
 def extract_response(raw_response):
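
On Python 3 the helper is intentionally a no-op, since all text is
already unicode; a quick sketch of the Python 2 behaviour:

.. code-block:: python

    >>> d = utf8dict({u"status": u"success", u"retval": 300}.items())
    >>> (d["status"], d["retval"])
    ('success', 300)
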

+ 5 - 2
celery/task/schedules.py

@@ -1,7 +1,10 @@
+from __future__ import absolute_import
+
 import warnings
-from celery.schedules import schedule, crontab_parser, crontab
+from ..schedules import schedule, crontab_parser, crontab
+from ..exceptions import CDeprecationWarning
 
 __all__ = ["schedule", "crontab_parser", "crontab"]
 
-warnings.warn(DeprecationWarning(
+warnings.warn(CDeprecationWarning(
     "celery.task.schedules is deprecated and renamed to celery.schedules"))

+ 28 - 13
celery/task/sets.py

@@ -1,15 +1,24 @@
+"""
+
+celery.task.sets
+================
+
+Creating and applying task groups.
+
+"""
 from __future__ import absolute_import
 from __future__ import with_statement
 
 import warnings
 
-from kombu.utils import cached_property
+from .. import registry
+from ..app import app_or_default
+from ..datastructures import AttributeDict
+from ..exceptions import CDeprecationWarning
+from ..utils import cached_property, reprcall, uuid
+from ..utils.compat import UserList
 
-from celery import registry
-from celery.app import app_or_default
-from celery.datastructures import AttributeDict
-from celery.utils import gen_unique_id, reprcall
-from celery.utils.compat import UserList
+__all__ = ["subtask", "TaskSet"]
 
 TASKSET_DEPRECATION_TEXT = """\
 Using this invocation of TaskSet is deprecated and will be removed
@@ -18,7 +27,7 @@ in Celery v2.4!
 TaskSets now supports multiple types of tasks, the API has to reflect
 this so the syntax has been changed to:
 
-    from celery.task.sets import TaskSet
+    from celery.task import TaskSet
 
     ts = TaskSet(tasks=[
             %(cls)s.subtask(args1, kwargs1, options1),
@@ -106,6 +115,12 @@ class subtask(AttributeDict):
         return registry.tasks[self.task]
 
 
+def maybe_subtask(t):
+    if not isinstance(t, subtask):
+        return subtask(t)
+    return t
+
+
 class TaskSet(UserList):
     """A task containing several subtasks, making it possible
     to track how many, or when all, of the tasks have been completed.
@@ -130,7 +145,7 @@ class TaskSet(UserList):
         self.app = app_or_default(app)
         if task is not None:
             if hasattr(task, "__iter__"):
-                tasks = task
+                tasks = [maybe_subtask(t) for t in task]
             else:
                 # Previously TaskSet only supported applying one kind of task.
                 # the signature then was TaskSet(task, arglist),
@@ -140,7 +155,7 @@ class TaskSet(UserList):
                 self._task_name = task.name
                 warnings.warn(TASKSET_DEPRECATION_TEXT % {
                                 "cls": task.__class__.__name__},
-                              DeprecationWarning)
+                              CDeprecationWarning)
         self.data = list(tasks or [])
         self.total = len(self.tasks)
         self.Publisher = Publisher or self.app.amqp.TaskPublisher
@@ -154,7 +169,7 @@ class TaskSet(UserList):
             return self.apply(taskset_id=taskset_id)
 
         with app.default_connection(connection, connect_timeout) as conn:
-            setid = taskset_id or gen_unique_id()
+            setid = taskset_id or uuid()
             pub = publisher or self.Publisher(connection=conn)
             try:
                 results = self._async_results(setid, pub)
@@ -170,7 +185,7 @@ class TaskSet(UserList):
 
     def apply(self, taskset_id=None):
         """Applies the taskset locally by blocking until all tasks return."""
-        setid = taskset_id or gen_unique_id()
+        setid = taskset_id or uuid()
         return self.app.TaskSetResult(setid, self._sync_results(setid))
 
     def _sync_results(self, taskset_id):
@@ -184,12 +199,12 @@ class TaskSet(UserList):
     def task(self):
         warnings.warn(
             "TaskSet.task is deprecated and will be removed in 1.4",
-            DeprecationWarning)
+            CDeprecationWarning)
         return self._task
 
     @property
     def task_name(self):
         warnings.warn(
             "TaskSet.task_name is deprecated and will be removed in 1.4",
-            DeprecationWarning)
+            CDeprecationWarning)
         return self._task_name

+ 3 - 0
celery/tests/__init__.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import logging
 import os
 import sys
@@ -11,6 +13,7 @@ os.environ.setdefault("CELERY_CONFIG_MODULE", config_module)
 os.environ["CELERY_LOADER"] = "default"
 os.environ["EVENTLET_NOPATCH"] = "yes"
 os.environ["GEVENT_NOPATCH"] = "yes"
+os.environ["KOMBU_DISABLE_LIMIT_PROTECTION"] = "yes"
 
 try:
     WindowsError = WindowsError  # noqa

+ 2 - 0
celery/tests/compat.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import sys
 
 

+ 2 - 0
celery/tests/config.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import os
 
 BROKER_TRANSPORT = "memory"

+ 2 - 0
celery/tests/functional/case.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import atexit
 import logging
 import os

+ 2 - 0
celery/tests/functional/tasks.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import time
 
 from celery.task import task

+ 324 - 0
celery/tests/test_app/__init__.py

@@ -0,0 +1,324 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
+import os
+import sys
+
+from mock import Mock
+
+from celery import Celery
+from celery import app as _app
+from celery.app import defaults
+from celery.app.base import BaseApp
+from celery.loaders.base import BaseLoader
+from celery.platforms import pyimplementation
+from celery.utils.serialization import pickle
+
+from celery.tests import config
+from celery.tests.utils import (unittest, mask_modules, platform_pyimp,
+                                sys_platform, pypy_version)
+from celery.utils.mail import ErrorMail
+from kombu.utils import gen_unique_id
+
+THIS_IS_A_KEY = "this is a value"
+
+
+class Object(object):
+
+    def __init__(self, **kwargs):
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+
+
+def _get_test_config():
+    return dict((key, getattr(config, key))
+                    for key in dir(config)
+                        if key.isupper() and not key.startswith("_"))
+
+test_config = _get_test_config()
+
+
+class test_App(unittest.TestCase):
+
+    def setUp(self):
+        self.app = Celery(set_as_current=False)
+        self.app.conf.update(test_config)
+
+    def test_task(self):
+        app = Celery("foozibari", set_as_current=False)
+
+        def fun():
+            pass
+
+        fun.__module__ = "__main__"
+        task = app.task(fun)
+        self.assertEqual(task.name, app.main + ".fun")
+
+    def test_repr(self):
+        self.assertTrue(repr(self.app))
+
+    def test_TaskSet(self):
+        ts = self.app.TaskSet()
+        self.assertListEqual(ts.tasks, [])
+        self.assertIs(ts.app, self.app)
+
+    def test_pickle_app(self):
+        changes = dict(THE_FOO_BAR="bars",
+                       THE_MII_MAR="jars")
+        self.app.conf.update(changes)
+        saved = pickle.dumps(self.app)
+        self.assertLess(len(saved), 2048)
+        restored = pickle.loads(saved)
+        self.assertDictContainsSubset(changes, restored.conf)
+
+    def test_worker_main(self):
+        from celery.bin import celeryd
+
+        class WorkerCommand(celeryd.WorkerCommand):
+
+            def execute_from_commandline(self, argv):
+                return argv
+
+        prev, celeryd.WorkerCommand = celeryd.WorkerCommand, WorkerCommand
+        try:
+            ret = self.app.worker_main(argv=["--version"])
+            self.assertListEqual(ret, ["--version"])
+        finally:
+            celeryd.WorkerCommand = prev
+
+    def test_config_from_envvar(self):
+        os.environ["CELERYTEST_CONFIG_OBJECT"] = "celery.tests.test_app"
+        self.app.config_from_envvar("CELERYTEST_CONFIG_OBJECT")
+        self.assertEqual(self.app.conf.THIS_IS_A_KEY, "this is a value")
+
+    def test_config_from_object(self):
+
+        class Object(object):
+            LEAVE_FOR_WORK = True
+            MOMENT_TO_STOP = True
+            CALL_ME_BACK = 123456789
+            WANT_ME_TO = False
+            UNDERSTAND_ME = True
+
+        self.app.config_from_object(Object())
+
+        self.assertTrue(self.app.conf.LEAVE_FOR_WORK)
+        self.assertTrue(self.app.conf.MOMENT_TO_STOP)
+        self.assertEqual(self.app.conf.CALL_ME_BACK, 123456789)
+        self.assertFalse(self.app.conf.WANT_ME_TO)
+        self.assertTrue(self.app.conf.UNDERSTAND_ME)
+
+    def test_config_from_cmdline(self):
+        cmdline = [".always_eager=no",
+                   ".result_backend=/dev/null",
+                   '.task_error_whitelist=(list)["a", "b", "c"]',
+                   "celeryd.prefetch_multiplier=368",
+                   ".foobarstring=(string)300",
+                   ".foobarint=(int)300",
+                   '.result_engine_options=(dict){"foo": "bar"}']
+        self.app.config_from_cmdline(cmdline, namespace="celery")
+        self.assertFalse(self.app.conf.CELERY_ALWAYS_EAGER)
+        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "/dev/null")
+        self.assertEqual(self.app.conf.CELERYD_PREFETCH_MULTIPLIER, 368)
+        self.assertListEqual(self.app.conf.CELERY_TASK_ERROR_WHITELIST,
+                             ["a", "b", "c"])
+        self.assertEqual(self.app.conf.CELERY_FOOBARSTRING, "300")
+        self.assertEqual(self.app.conf.CELERY_FOOBARINT, 300)
+        self.assertDictEqual(self.app.conf.CELERY_RESULT_ENGINE_OPTIONS,
+                             {"foo": "bar"})
+
+    def test_compat_setting_CELERY_BACKEND(self):
+
+        self.app.config_from_object(Object(CELERY_BACKEND="set_by_us"))
+        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "set_by_us")
+
+    def test_setting_BROKER_TRANSPORT_OPTIONS(self):
+
+        _args = {'foo': 'bar', 'spam': 'baz'}
+
+        self.app.config_from_object(Object())
+        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, {})
+
+        self.app.config_from_object(Object(BROKER_TRANSPORT_OPTIONS=_args))
+        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, _args)
+
+    def test_Windows_log_color_disabled(self):
+        self.app.IS_WINDOWS = True
+        self.assertFalse(self.app.log.supports_color())
+
+    def test_compat_setting_CARROT_BACKEND(self):
+        self.app.config_from_object(Object(CARROT_BACKEND="set_by_us"))
+        self.assertEqual(self.app.conf.BROKER_TRANSPORT, "set_by_us")
+
+    def test_mail_admins(self):
+
+        class Loader(BaseLoader):
+
+            def mail_admins(*args, **kwargs):
+                return args, kwargs
+
+        self.app.loader = Loader()
+        self.app.conf.ADMINS = None
+        self.assertFalse(self.app.mail_admins("Subject", "Body"))
+        self.app.conf.ADMINS = [("George Costanza", "george@vandelay.com")]
+        self.assertTrue(self.app.mail_admins("Subject", "Body"))
+
+    def test_amqp_get_broker_info(self):
+        self.assertDictContainsSubset({"hostname": "localhost",
+                                       "userid": "guest",
+                                       "password": "guest",
+                                       "virtual_host": "/"},
+                                      self.app.broker_connection(
+                                          transport="amqplib").info())
+        self.app.conf.BROKER_PORT = 1978
+        self.app.conf.BROKER_VHOST = "foo"
+        self.assertDictContainsSubset({"port": 1978,
+                                       "virtual_host": "foo"},
+                                      self.app.broker_connection(
+                                          transport="amqplib").info())
+        conn = self.app.broker_connection(virtual_host="/value")
+        self.assertDictContainsSubset({"virtual_host": "/value"},
+                                      conn.info())
+
+    def test_BROKER_BACKEND_alias(self):
+        self.assertEqual(self.app.conf.BROKER_BACKEND,
+                         self.app.conf.BROKER_TRANSPORT)
+
+    def test_with_default_connection(self):
+
+        @self.app.with_default_connection
+        def handler(connection=None, foo=None):
+            return connection, foo
+
+        connection, foo = handler(foo=42)
+        self.assertEqual(foo, 42)
+        self.assertTrue(connection)
+
+    def test_after_fork(self):
+        p = self.app._pool = Mock()
+        self.app._after_fork(self.app)
+        p.force_close_all.assert_called_with()
+        self.assertIsNone(self.app._pool)
+        self.app._after_fork(self.app)
+
+    def test_pool_no_multiprocessing(self):
+        with mask_modules("multiprocessing.util"):
+            pool = self.app.pool
+            self.assertIs(pool, self.app._pool)
+
+    def test_bugreport(self):
+        self.assertTrue(self.app.bugreport())
+
+    def test_send_task_sent_event(self):
+        from celery.app import amqp
+
+        class Dispatcher(object):
+            sent = []
+
+            def send(self, type, **fields):
+                self.sent.append((type, fields))
+
+        conn = self.app.broker_connection()
+        chan = conn.channel()
+        try:
+            for e in ("foo_exchange", "moo_exchange", "bar_exchange"):
+                chan.exchange_declare(e, "direct", durable=True)
+                chan.queue_declare(e, durable=True)
+                chan.queue_bind(e, e, e)
+        finally:
+            chan.close()
+        assert conn.transport_cls == "memory"
+
+        pub = self.app.amqp.TaskPublisher(conn, exchange="foo_exchange")
+        self.assertIn("foo_exchange", amqp._exchanges_declared)
+
+        dispatcher = Dispatcher()
+        self.assertTrue(pub.delay_task("footask", (), {},
+                                       exchange="moo_exchange",
+                                       routing_key="moo_exchange",
+                                       event_dispatcher=dispatcher))
+        self.assertTrue(dispatcher.sent)
+        self.assertEqual(dispatcher.sent[0][0], "task-sent")
+        self.assertTrue(pub.delay_task("footask", (), {},
+                                       event_dispatcher=dispatcher,
+                                       exchange="bar_exchange",
+                                       routing_key="bar_exchange"))
+        self.assertIn("bar_exchange", amqp._exchanges_declared)
+
+    def test_error_mail_sender(self):
+        x = ErrorMail.subject % {"name": "task_name",
+                                 "id": gen_unique_id(),
+                                 "exc": "FOOBARBAZ",
+                                 "hostname": "lana"}
+        self.assertTrue(x)
+
+
+class test_BaseApp(unittest.TestCase):
+
+    def test_on_init(self):
+        BaseApp()
+
+
+class test_defaults(unittest.TestCase):
+
+    def test_str_to_bool(self):
+        for s in ("false", "no", "0"):
+            self.assertFalse(defaults.str_to_bool(s))
+        for s in ("true", "yes", "1"):
+            self.assertTrue(defaults.str_to_bool(s))
+        with self.assertRaises(TypeError):
+            defaults.str_to_bool("unsure")
+
+
+class test_debugging_utils(unittest.TestCase):
+
+    def test_enable_disable_trace(self):
+        try:
+            _app.enable_trace()
+            self.assertEqual(_app.app_or_default, _app._app_or_default_trace)
+            _app.disable_trace()
+            self.assertEqual(_app.app_or_default, _app._app_or_default)
+        finally:
+            _app.disable_trace()
+
+
+class test_compilation(unittest.TestCase):
+    _clean = ("celery.app.base", )
+
+    def setUp(self):
+        self._prev = dict((k, sys.modules.pop(k, None)) for k in self._clean)
+
+    def tearDown(self):
+        sys.modules.update(self._prev)
+
+    def test_kombu_version_check(self):
+        import kombu
+        kombu.VERSION = (0, 9, 9)
+        with self.assertRaises(ImportError):
+            __import__("celery.app.base")
+
+
+class test_pyimplementation(unittest.TestCase):
+
+    def test_platform_python_implementation(self):
+        with platform_pyimp(lambda: "Xython"):
+            self.assertEqual(pyimplementation(), "Xython")
+
+    def test_platform_jython(self):
+        with platform_pyimp():
+            with sys_platform("java 1.6.51"):
+                self.assertIn("Jython", pyimplementation())
+
+    def test_platform_pypy(self):
+        with platform_pyimp():
+            with sys_platform("darwin"):
+                with pypy_version((1, 4, 3)):
+                    self.assertIn("PyPy", pyimplementation())
+                with pypy_version((1, 4, 3, "a4")):
+                    self.assertIn("PyPy", pyimplementation())
+
+    def test_platform_fallback(self):
+        with platform_pyimp():
+            with sys_platform("darwin"):
+                with pypy_version():
+                    self.assertEqual("CPython", pyimplementation())

+ 0 - 313
celery/tests/test_app/test_app.py

@@ -1,313 +0,0 @@
-from __future__ import with_statement
-
-import os
-import sys
-
-from mock import Mock
-
-from celery import Celery
-from celery import app as _app
-from celery.app import defaults
-from celery.app.base import BaseApp, pyimplementation
-from celery.loaders.base import BaseLoader
-from celery.utils.serialization import pickle
-
-from celery.tests import config
-from celery.tests.utils import (unittest, mask_modules, platform_pyimp,
-                                sys_platform, pypy_version)
-
-THIS_IS_A_KEY = "this is a value"
-
-
-class Object(object):
-
-    def __init__(self, **kwargs):
-        for key, value in kwargs.items():
-            setattr(self, key, value)
-
-
-def _get_test_config():
-    return dict((key, getattr(config, key))
-                    for key in dir(config)
-                        if key.isupper() and not key.startswith("_"))
-
-test_config = _get_test_config()
-
-
-class test_App(unittest.TestCase):
-
-    def setUp(self):
-        self.app = Celery(set_as_current=False)
-        self.app.conf.update(test_config)
-
-    def test_task(self):
-        app = Celery("foozibari", set_as_current=False)
-
-        def fun():
-            pass
-
-        fun.__module__ = "__main__"
-        task = app.task(fun)
-        self.assertEqual(task.name, app.main + ".fun")
-
-    def test_repr(self):
-        self.assertTrue(repr(self.app))
-
-    def test_TaskSet(self):
-        ts = self.app.TaskSet()
-        self.assertListEqual(ts.tasks, [])
-        self.assertIs(ts.app, self.app)
-
-    def test_pickle_app(self):
-        changes = dict(THE_FOO_BAR="bars",
-                       THE_MII_MAR="jars")
-        self.app.conf.update(changes)
-        saved = pickle.dumps(self.app)
-        self.assertLess(len(saved), 2048)
-        restored = pickle.loads(saved)
-        self.assertDictContainsSubset(changes, restored.conf)
-
-    def test_worker_main(self):
-        from celery.bin import celeryd
-
-        class WorkerCommand(celeryd.WorkerCommand):
-
-            def execute_from_commandline(self, argv):
-                return argv
-
-        prev, celeryd.WorkerCommand = celeryd.WorkerCommand, WorkerCommand
-        try:
-            ret = self.app.worker_main(argv=["--version"])
-            self.assertListEqual(ret, ["--version"])
-        finally:
-            celeryd.WorkerCommand = prev
-
-    def test_config_from_envvar(self):
-        os.environ["CELERYTEST_CONFIG_OBJECT"] = \
-                "celery.tests.test_app.test_app"
-        self.app.config_from_envvar("CELERYTEST_CONFIG_OBJECT")
-        self.assertEqual(self.app.conf.THIS_IS_A_KEY, "this is a value")
-
-    def test_config_from_object(self):
-
-        class Object(object):
-            LEAVE_FOR_WORK = True
-            MOMENT_TO_STOP = True
-            CALL_ME_BACK = 123456789
-            WANT_ME_TO = False
-            UNDERSTAND_ME = True
-
-        self.app.config_from_object(Object())
-
-        self.assertTrue(self.app.conf.LEAVE_FOR_WORK)
-        self.assertTrue(self.app.conf.MOMENT_TO_STOP)
-        self.assertEqual(self.app.conf.CALL_ME_BACK, 123456789)
-        self.assertFalse(self.app.conf.WANT_ME_TO)
-        self.assertTrue(self.app.conf.UNDERSTAND_ME)
-
-    def test_config_from_cmdline(self):
-        cmdline = [".always_eager=no",
-                   ".result_backend=/dev/null",
-                   '.task_error_whitelist=(list)["a", "b", "c"]',
-                   "celeryd.prefetch_multiplier=368",
-                   ".foobarstring=(string)300",
-                   ".foobarint=(int)300",
-                   '.result_engine_options=(dict){"foo": "bar"}']
-        self.app.config_from_cmdline(cmdline, namespace="celery")
-        self.assertFalse(self.app.conf.CELERY_ALWAYS_EAGER)
-        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "/dev/null")
-        self.assertEqual(self.app.conf.CELERYD_PREFETCH_MULTIPLIER, 368)
-        self.assertListEqual(self.app.conf.CELERY_TASK_ERROR_WHITELIST,
-                             ["a", "b", "c"])
-        self.assertEqual(self.app.conf.CELERY_FOOBARSTRING, "300")
-        self.assertEqual(self.app.conf.CELERY_FOOBARINT, 300)
-        self.assertDictEqual(self.app.conf.CELERY_RESULT_ENGINE_OPTIONS,
-                             {"foo": "bar"})
-
-    def test_compat_setting_CELERY_BACKEND(self):
-
-        self.app.config_from_object(Object(CELERY_BACKEND="set_by_us"))
-        self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, "set_by_us")
-
-    def test_setting_BROKER_TRANSPORT_OPTIONS(self):
-
-        _args = {'foo': 'bar', 'spam': 'baz'}
-
-        self.app.config_from_object(Object())
-        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, {})
-
-        self.app.config_from_object(Object(BROKER_TRANSPORT_OPTIONS=_args))
-        self.assertEqual(self.app.conf.BROKER_TRANSPORT_OPTIONS, _args)
-
-    def test_Windows_log_color_disabled(self):
-        self.app.IS_WINDOWS = True
-        self.assertFalse(self.app.log.supports_color())
-
-    def test_compat_setting_CARROT_BACKEND(self):
-        self.app.config_from_object(Object(CARROT_BACKEND="set_by_us"))
-        self.assertEqual(self.app.conf.BROKER_TRANSPORT, "set_by_us")
-
-    def test_mail_admins(self):
-
-        class Loader(BaseLoader):
-
-            def mail_admins(*args, **kwargs):
-                return args, kwargs
-
-        self.app.loader = Loader()
-        self.app.conf.ADMINS = None
-        self.assertFalse(self.app.mail_admins("Subject", "Body"))
-        self.app.conf.ADMINS = [("George Costanza", "george@vandelay.com")]
-        self.assertTrue(self.app.mail_admins("Subject", "Body"))
-
-    def test_amqp_get_broker_info(self):
-        self.assertDictContainsSubset({"hostname": "localhost",
-                                       "userid": "guest",
-                                       "password": "guest",
-                                       "virtual_host": "/"},
-                                      self.app.broker_connection(
-                                          transport="amqplib").info())
-        self.app.conf.BROKER_PORT = 1978
-        self.app.conf.BROKER_VHOST = "foo"
-        self.assertDictContainsSubset({"port": 1978,
-                                       "virtual_host": "foo"},
-                                      self.app.broker_connection(
-                                          transport="amqplib").info())
-        conn = self.app.broker_connection(virtual_host="/value")
-        self.assertDictContainsSubset({"virtual_host": "/value"},
-                                      conn.info())
-
-    def test_BROKER_BACKEND_alias(self):
-        self.assertEqual(self.app.conf.BROKER_BACKEND,
-                         self.app.conf.BROKER_TRANSPORT)
-
-    def test_with_default_connection(self):
-
-        @self.app.with_default_connection
-        def handler(connection=None, foo=None):
-            return connection, foo
-
-        connection, foo = handler(foo=42)
-        self.assertEqual(foo, 42)
-        self.assertTrue(connection)
-
-    def test_after_fork(self):
-        p = self.app._pool = Mock()
-        self.app._after_fork(self.app)
-        p.force_close_all.assert_called_with()
-        self.assertIsNone(self.app._pool)
-        self.app._after_fork(self.app)
-
-    def test_pool_no_multiprocessing(self):
-        with mask_modules("multiprocessing.util"):
-            pool = self.app.pool
-            self.assertIs(pool, self.app._pool)
-
-    def test_bugreport(self):
-        self.assertTrue(self.app.bugreport())
-
-    def test_send_task_sent_event(self):
-        from celery.app import amqp
-
-        class Dispatcher(object):
-            sent = []
-
-            def send(self, type, **fields):
-                self.sent.append((type, fields))
-
-        conn = self.app.broker_connection()
-        chan = conn.channel()
-        try:
-            for e in ("foo_exchange", "moo_exchange", "bar_exchange"):
-                chan.exchange_declare(e, "direct", durable=True)
-                chan.queue_declare(e, durable=True)
-                chan.queue_bind(e, e, e)
-        finally:
-            chan.close()
-        assert conn.transport_cls == "memory"
-
-        pub = self.app.amqp.TaskPublisher(conn, exchange="foo_exchange")
-        self.assertIn("foo_exchange", amqp._exchanges_declared)
-
-        dispatcher = Dispatcher()
-        self.assertTrue(pub.delay_task("footask", (), {},
-                                       exchange="moo_exchange",
-                                       routing_key="moo_exchange",
-                                       event_dispatcher=dispatcher))
-        self.assertTrue(dispatcher.sent)
-        self.assertEqual(dispatcher.sent[0][0], "task-sent")
-        self.assertTrue(pub.delay_task("footask", (), {},
-                                       event_dispatcher=dispatcher,
-                                       exchange="bar_exchange",
-                                       routing_key="bar_exchange"))
-        self.assertIn("bar_exchange", amqp._exchanges_declared)
-
-
-class test_BaseApp(unittest.TestCase):
-
-    def test_on_init(self):
-        BaseApp()
-
-
-class test_defaults(unittest.TestCase):
-
-    def test_str_to_bool(self):
-        for s in ("false", "no", "0"):
-            self.assertFalse(defaults.str_to_bool(s))
-        for s in ("true", "yes", "1"):
-            self.assertTrue(defaults.str_to_bool(s))
-        self.assertRaises(TypeError, defaults.str_to_bool, "unsure")
-
-
-class test_debugging_utils(unittest.TestCase):
-
-    def test_enable_disable_trace(self):
-        try:
-            _app.enable_trace()
-            self.assertEqual(_app.app_or_default, _app._app_or_default_trace)
-            _app.disable_trace()
-            self.assertEqual(_app.app_or_default, _app._app_or_default)
-        finally:
-            _app.disable_trace()
-
-
-class test_compilation(unittest.TestCase):
-    _clean = ("celery.app.base", )
-
-    def setUp(self):
-        self._prev = dict((k, sys.modules.pop(k, None)) for k in self._clean)
-
-    def tearDown(self):
-        sys.modules.update(self._prev)
-
-    def test_kombu_version_check(self):
-        import kombu
-        kombu.VERSION = (0, 9, 9)
-        with self.assertRaises(ImportError):
-            __import__("celery.app.base")
-
-
-class test_pyimplementation(unittest.TestCase):
-
-    def test_platform_python_implementation(self):
-        with platform_pyimp(lambda: "Xython"):
-            self.assertEqual(pyimplementation(), "Xython")
-
-    def test_platform_jython(self):
-        with platform_pyimp():
-            with sys_platform("java 1.6.51"):
-                self.assertIn("Jython", pyimplementation())
-
-    def test_platform_pypy(self):
-        with platform_pyimp():
-            with sys_platform("darwin"):
-                with pypy_version((1, 4, 3)):
-                    self.assertIn("PyPy", pyimplementation())
-                with pypy_version((1, 4, 3, "a4")):
-                    self.assertIn("PyPy", pyimplementation())
-
-    def test_platform_fallback(self):
-        with platform_pyimp():
-            with sys_platform("darwin"):
-                with pypy_version():
-                    self.assertEqual("CPython", pyimplementation())

+ 4 - 4
celery/tests/test_app/test_app_amqp.py

@@ -1,10 +1,10 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 from mock import Mock
 
-from celery.tests.utils import AppCase
-
 from celery.app.amqp import MSG_OPTIONS, extract_msg_options
+from celery.tests.utils import AppCase
 
 
 class TestMsgOptions(AppCase):
@@ -24,10 +24,10 @@ class test_TaskPublisher(AppCase):
     def test__exit__(self):
 
         publisher = self.app.amqp.TaskPublisher(self.app.broker_connection())
-        publisher.close = Mock()
+        publisher.release = Mock()
         with publisher:
             pass
-        publisher.close.assert_called_with()
+        publisher.release.assert_called_with()
 
     def test_ensure_declare_queue(self, q="x1242112"):
         publisher = self.app.amqp.TaskPublisher(Mock())

+ 16 - 15
celery/tests/test_app/test_beat.py

@@ -1,8 +1,8 @@
+from __future__ import absolute_import
+
 import logging
-from celery.tests.utils import unittest
 
 from datetime import datetime, timedelta
-
 from nose import SkipTest
 
 from celery import beat
@@ -10,7 +10,8 @@ from celery import registry
 from celery.result import AsyncResult
 from celery.schedules import schedule
 from celery.task.base import Task
-from celery.utils import gen_unique_id
+from celery.utils import uuid
+from celery.tests.utils import unittest
 
 
 class Object(object):
@@ -104,7 +105,7 @@ class MockLogger(logging.Logger):
         logging.Logger.__init__(self, *args, **kwargs)
 
     def _log(self, level, msg, args, **kwargs):
-        self.logged.append((level, msg))
+        self.logged.append((level, msg, args, kwargs))
 
 
 class mScheduler(beat.Scheduler):
@@ -119,7 +120,7 @@ class mScheduler(beat.Scheduler):
                           "args": args,
                           "kwargs": kwargs,
                           "options": options})
-        return AsyncResult(gen_unique_id())
+        return AsyncResult(uuid())
 
 
 class mSchedulerSchedulingError(mScheduler):
@@ -189,9 +190,9 @@ class test_Scheduler(unittest.TestCase):
                       schedule=always_due)
         self.assertEqual(scheduler.tick(), 1)
         self.assertTrue(scheduler.logger.logged[0])
-        level, msg = scheduler.logger.logged[0]
+        level, msg, args, kwargs = scheduler.logger.logged[0]
         self.assertEqual(level, logging.ERROR)
-        self.assertIn("Couldn't apply scheduled task", msg)
+        self.assertIn("Couldn't apply scheduled task", args[0].args[0])
 
     def test_due_tick_RuntimeError(self):
         scheduler = mSchedulerRuntimeError()
@@ -262,7 +263,7 @@ class test_Service(unittest.TestCase):
                 if self.tick_raises_exit:
                     raise SystemExit()
                 if self.shutdown_service:
-                    self.shutdown_service._shutdown.set()
+                    self.shutdown_service._is_shutdown.set()
                 return 0.0
 
         return beat.Service(scheduler_cls=PersistentScheduler), sh
@@ -279,12 +280,12 @@ class test_Service(unittest.TestCase):
         s.sync()
         self.assertTrue(sh.closed)
         self.assertTrue(sh.synced)
-        self.assertTrue(s._stopped.isSet())
+        self.assertTrue(s._is_stopped.isSet())
         s.sync()
         s.stop(wait=False)
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())
         s.stop(wait=True)
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())
 
         p = s.scheduler._store
         s.scheduler._store = None
@@ -295,25 +296,25 @@ class test_Service(unittest.TestCase):
 
     def test_start_embedded_process(self):
         s, sh = self.get_service()
-        s._shutdown.set()
+        s._is_shutdown.set()
         s.start(embedded_process=True)
 
     def test_start_thread(self):
         s, sh = self.get_service()
-        s._shutdown.set()
+        s._is_shutdown.set()
         s.start(embedded_process=False)
 
     def test_start_tick_raises_exit_error(self):
         s, sh = self.get_service()
         s.scheduler.tick_raises_exit = True
         s.start()
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())
 
     def test_start_manages_one_tick_before_shutdown(self):
         s, sh = self.get_service()
         s.scheduler.shutdown_service = s
         s.start()
-        self.assertTrue(s._shutdown.isSet())
+        self.assertTrue(s._is_shutdown.isSet())
 
 
 class test_EmbeddedService(unittest.TestCase):

+ 1 - 0
celery/tests/test_app/test_celery.py

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from celery.tests.utils import unittest
 
 import celery

+ 30 - 19
celery/tests/test_app/test_loaders.py

@@ -1,18 +1,20 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import os
 import sys
+import warnings
 
 from celery import task
 from celery import loaders
 from celery.app import app_or_default
-from celery.exceptions import ImproperlyConfigured
+from celery.exceptions import CPendingDeprecationWarning, ImproperlyConfigured
 from celery.loaders import base
 from celery.loaders import default
 from celery.loaders.app import AppLoader
 
 from celery.tests.compat import catch_warnings
-from celery.tests.utils import unittest, AppCase, with_environ
+from celery.tests.utils import unittest, AppCase
 
 
 class ObjectConfig(object):
@@ -66,14 +68,22 @@ class TestLoaders(AppCase):
                           default.Loader)
 
     def test_current_loader(self):
-        self.assertIs(loaders.current_loader(), self.app.loader)
+        warnings.resetwarnings()
+        with catch_warnings(record=True) as log:
+            self.assertIs(loaders.current_loader(), self.app.loader)
+            warning = log[0].message
+
+            self.assertIsInstance(warning, CPendingDeprecationWarning)
+            self.assertIn("deprecation", warning.args[0])
 
     def test_load_settings(self):
-        self.assertIs(loaders.load_settings(), self.app.conf)
+        warnings.resetwarnings()
+        with catch_warnings(record=True) as log:
+            self.assertIs(loaders.load_settings(), self.app.conf)
+            warning = log[0].message
 
-    @with_environ("CELERY_LOADER", "default")
-    def test_detect_loader_CELERY_LOADER(self):
-        self.assertIsInstance(loaders.setup_loader(), default.Loader)
+            self.assertIsInstance(warning, CPendingDeprecationWarning)
+            self.assertIn("deprecation", warning.args[0])
 
 
 class TestLoaderBase(unittest.TestCase):
@@ -103,12 +113,14 @@ class TestLoaderBase(unittest.TestCase):
         self.assertEqual(self.loader.conf["foo"], "bar")
 
     def test_import_default_modules(self):
-        self.assertEqual(sorted(self.loader.import_default_modules()),
-                         sorted([os, sys, task]))
+        modnames = lambda l: [m.__name__ for m in l]
+        self.assertEqual(sorted(modnames(
+                            self.loader.import_default_modules())),
+                         sorted(modnames([os, sys, task])))
 
     def test_import_from_cwd_custom_imp(self):
 
-        def imp(module):
+        def imp(module, package=None):
             imp.called = True
         imp.called = False
 
@@ -126,8 +138,8 @@ class TestLoaderBase(unittest.TestCase):
             self.assertIsInstance(warning, MockMail.SendmailWarning)
             self.assertIn("KeyError", warning.args[0])
 
-            self.assertRaises(KeyError, self.loader.mail_admins,
-                              fail_silently=False, **opts)
+            with self.assertRaises(KeyError):
+                self.loader.mail_admins(fail_silently=False, **opts)
 
     def test_mail_admins(self):
         MockMail.Mailer.raise_on_send = False
@@ -143,8 +155,8 @@ class TestLoaderBase(unittest.TestCase):
         self.assertIs(loader.mail, mail)
 
     def test_cmdline_config_ValueError(self):
-        self.assertRaises(ValueError, self.loader.cmdline_config_parser,
-                         ["broker.port=foobar"])
+        with self.assertRaises(ValueError):
+            self.loader.cmdline_config_parser(["broker.port=foobar"])
 
 
 class TestDefaultLoader(unittest.TestCase):
@@ -220,17 +232,16 @@ class test_AppLoader(unittest.TestCase):
     def test_config_from_envvar(self, key="CELERY_HARNESS_CFG1"):
         self.assertFalse(self.loader.config_from_envvar("HDSAJIHWIQHEWQU",
                                                         silent=True))
-        self.assertRaises(ImproperlyConfigured,
-                          self.loader.config_from_envvar, "HDSAJIHWIQHEWQU",
-                          silent=False)
+        with self.assertRaises(ImproperlyConfigured):
+            self.loader.config_from_envvar("HDSAJIHWIQHEWQU", silent=False)
         os.environ[key] = __name__ + ".object_config"
         self.assertTrue(self.loader.config_from_envvar(key))
         self.assertEqual(self.loader.conf["FOO"], 1)
         self.assertEqual(self.loader.conf["BAR"], 2)
 
         os.environ[key] = "unknown_asdwqe.asdwqewqe"
-        self.assertRaises(ImportError,
-                          self.loader.config_from_envvar, key, silent=False)
+        with self.assertRaises(ImportError):
+            self.loader.config_from_envvar(key, silent=False)
         self.assertFalse(self.loader.config_from_envvar(key, silent=True))
 
         os.environ[key] = __name__ + ".dict_config"
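
Note: current_loader and load_settings are now expected to warn before delegating to the app, so the tests record warnings instead of merely calling the functions. A reusable helper in the same spirit might look like this; it is hypothetical and not part of the suite, and it uses the stdlib warnings.catch_warnings where the tests use their own backport from celery.tests.compat.

    import warnings

    def call_and_catch_warning(fun, *args, **kwargs):
        """Call fun() and return (result, first recorded warning)."""
        warnings.resetwarnings()                 # undo filters set elsewhere
        with warnings.catch_warnings(record=True) as log:
            warnings.simplefilter("always")      # record repeated warnings too
            result = fun(*args, **kwargs)
        assert log, "expected %r to warn" % (fun, )
        return result, log[0].message            # the warning instance itself

Usage would mirror the tests above: result, w = call_and_catch_warning(loaders.current_loader), then assert isinstance(w, CPendingDeprecationWarning).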

+ 5 - 4
celery/tests/test_compat/test_log.py → celery/tests/test_app/test_log.py

@@ -1,8 +1,8 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
 import logging
-from celery.tests.utils import unittest
 from tempfile import mktemp
 
 from celery import log
@@ -10,8 +10,9 @@ from celery.log import (setup_logger, setup_task_logger,
                         get_default_logger, get_task_logger,
                         redirect_stdouts_to_logger, LoggingProxy,
                         setup_logging_subsystem)
-from celery.utils import gen_unique_id
+from celery.utils import uuid
 from celery.utils.compat import _CompatLoggerAdapter
+from celery.tests.utils import unittest
 from celery.tests.utils import (override_stdouts, wrap_logger,
                                 get_handlers, set_handlers)
 
@@ -119,7 +120,7 @@ class test_task_logger(test_default_logger):
         logger = get_task_logger()
         logger.handlers = []
         logging.root.manager.loggerDict.pop(logger.name, None)
-        self.uid = gen_unique_id()
+        self.uid = uuid()
 
     def setup_logger(self, *args, **kwargs):
         return setup_task_logger(*args, **dict(kwargs, task_name=self.uid,
@@ -154,7 +155,7 @@ class test_CompatLoggerAdapter(unittest.TestCase):
         self.logger, self.adapter = self.createAdapter()
 
     def createAdapter(self, name=None, extra={"foo": "bar"}):
-        logger = MockLogger(name=name or gen_unique_id())
+        logger = MockLogger(name=name or uuid())
         return logger, _CompatLoggerAdapter(logger, extra)
 
     def test_levels(self):
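
Note: this file, like most in the changeset, swaps celery.utils.gen_unique_id for the shorter celery.utils.uuid. The function is just a string-form UUID4; a sketch follows, and the compatibility alias is an assumption on this editor's part, since celery/utils/__init__.py is not shown in this diff.

    from uuid import uuid4

    def uuid():
        """Return a unique id as a string (UUID4)."""
        return str(uuid4())

    # Assumed compatibility shim so `from celery.utils import gen_unique_id`
    # keeps working during a deprecation period; not visible in this diff.
    gen_unique_id = uuid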

+ 8 - 4
celery/tests/test_app/test_routes.py

@@ -1,3 +1,6 @@
+from __future__ import absolute_import
+from __future__ import with_statement
+
 from functools import wraps
 
 from celery import routes
@@ -65,7 +68,8 @@ class test_MapRoute(unittest.TestCase):
     def test_expand_route_not_found(self):
         expand = E(current_app.conf.CELERY_QUEUES)
         route = routes.MapRoute({"a": {"queue": "x"}})
-        self.assertRaises(QueueNotFound, expand, route.route_for_task("a"))
+        with self.assertRaises(QueueNotFound):
+            expand(route.route_for_task("a"))
 
 
 class test_lookup_route(unittest.TestCase):
@@ -124,14 +128,14 @@ class test_lookup_route(unittest.TestCase):
 class test_prepare(unittest.TestCase):
 
     def test_prepare(self):
-        from celery.datastructures import LocalCache
+        from celery.datastructures import LRUCache
         o = object()
         R = [{"foo": "bar"},
-                  "celery.datastructures.LocalCache",
+                  "celery.datastructures.LRUCache",
                   o]
         p = routes.prepare(R)
         self.assertIsInstance(p[0], routes.MapRoute)
-        self.assertIsInstance(maybe_promise(p[1]), LocalCache)
+        self.assertIsInstance(maybe_promise(p[1]), LRUCache)
         self.assertIs(p[2], o)
 
         self.assertEqual(routes.prepare(o), [o])
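
Note: celery.datastructures.LocalCache is referenced here under its new name LRUCache. A minimal sketch of the data structure the name promises, built on OrderedDict for illustration; the real implementation may differ in detail.

    from collections import OrderedDict

    class LRUCache(object):
        """Mapping that evicts the least recently used key past `limit`."""

        def __init__(self, limit=None):
            self.limit = limit
            self.data = OrderedDict()

        def __getitem__(self, key):
            value = self.data.pop(key)    # raises KeyError if missing
            self.data[key] = value        # reinsert as most recently used
            return value

        def __setitem__(self, key, value):
            self.data.pop(key, None)      # updating also refreshes recency
            if self.limit and len(self.data) >= self.limit:
                self.data.popitem(last=False)   # drop least recently used
            self.data[key] = value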

+ 9 - 7
celery/tests/test_backends/__init__.py

@@ -1,10 +1,10 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
-from celery.tests.utils import unittest
-
 from celery import backends
 from celery.backends.amqp import AMQPBackend
 from celery.backends.cache import CacheBackend
+from celery.tests.utils import unittest
 
 
 class TestBackends(unittest.TestCase):
@@ -17,11 +17,13 @@ class TestBackends(unittest.TestCase):
                                   expect_cls)
 
     def test_get_backend_cache(self):
-        backends._backend_cache = {}
-        backends.get_backend_cls("amqp")
-        self.assertIn("amqp", backends._backend_cache)
-        amqp_backend = backends.get_backend_cls("amqp")
-        self.assertIs(amqp_backend, backends._backend_cache["amqp"])
+        backends.get_backend_cls.clear()
+        hits = backends.get_backend_cls.hits
+        misses = backends.get_backend_cls.misses
+        self.assertTrue(backends.get_backend_cls("amqp"))
+        self.assertEqual(backends.get_backend_cls.misses, misses + 1)
+        self.assertTrue(backends.get_backend_cls("amqp"))
+        self.assertEqual(backends.get_backend_cls.hits, hits + 1)
 
     def test_unknown_backend(self):
         with self.assertRaises(ValueError):
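
Note: the rewritten test shows that backends.get_backend_cls is no longer backed by a hand-rolled _backend_cache dict but by a memoizing decorator exposing hits, misses and clear(). A sketch of such a decorator, positional arguments only for brevity; celery's actual memoize may key and bound its cache differently.

    def memoize(fun):
        """Cache fun's return values and count cache hits and misses."""
        cache = {}

        def _memoized(*args):
            try:
                value = cache[args]
                _memoized.hits += 1
            except KeyError:
                value = cache[args] = fun(*args)
                _memoized.misses += 1
            return value

        _memoized.hits = _memoized.misses = 0
        _memoized.clear = cache.clear     # lets tests reset state
        return _memoized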

+ 71 - 64
celery/tests/test_backends/test_amqp.py

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import socket
@@ -12,7 +13,7 @@ from celery.app import app_or_default
 from celery.backends.amqp import AMQPBackend
 from celery.datastructures import ExceptionInfo
 from celery.exceptions import TimeoutError
-from celery.utils import gen_unique_id
+from celery.utils import uuid
 
 from celery.tests.utils import unittest
 from celery.tests.utils import sleepdeprived
@@ -34,7 +35,7 @@ class test_AMQPBackend(unittest.TestCase):
         tb1 = self.create_backend()
         tb2 = self.create_backend()
 
-        tid = gen_unique_id()
+        tid = uuid()
 
         tb1.mark_as_done(tid, 42)
         self.assertEqual(tb2.get_status(tid), states.SUCCESS)
@@ -46,7 +47,7 @@ class test_AMQPBackend(unittest.TestCase):
         tb1 = self.create_backend()
         tb2 = self.create_backend()
 
-        tid2 = gen_unique_id()
+        tid2 = uuid()
         result = {"foo": "baz", "bar": SomeClass(12345)}
         tb1.mark_as_done(tid2, result)
         # is serialized properly.
@@ -58,28 +59,28 @@ class test_AMQPBackend(unittest.TestCase):
         tb1 = self.create_backend()
         tb2 = self.create_backend()
 
-        tid3 = gen_unique_id()
+        tid3 = uuid()
         try:
             raise KeyError("foo")
         except KeyError, exception:
             einfo = ExceptionInfo(sys.exc_info())
-        tb1.mark_as_failure(tid3, exception, traceback=einfo.traceback)
-        self.assertEqual(tb2.get_status(tid3), states.FAILURE)
-        self.assertIsInstance(tb2.get_result(tid3), KeyError)
-        self.assertEqual(tb2.get_traceback(tid3), einfo.traceback)
+            tb1.mark_as_failure(tid3, exception, traceback=einfo.traceback)
+            self.assertEqual(tb2.get_status(tid3), states.FAILURE)
+            self.assertIsInstance(tb2.get_result(tid3), KeyError)
+            self.assertEqual(tb2.get_traceback(tid3), einfo.traceback)
 
     def test_repair_uuid(self):
         from celery.backends.amqp import repair_uuid
         for i in range(10):
-            uuid = gen_unique_id()
-            self.assertEqual(repair_uuid(uuid.replace("-", "")), uuid)
+            tid = uuid()
+            self.assertEqual(repair_uuid(tid.replace("-", "")), tid)
 
-    def test_expires_defaults_to_config(self):
+    def test_expires_defaults_to_config_deprecated_setting(self):
         app = app_or_default()
         prev = app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES
         app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = 10
         try:
-            b = self.create_backend(expires=None)
+            b = self.create_backend()
             self.assertEqual(b.queue_arguments.get("x-expires"), 10 * 1000.0)
         finally:
             app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = prev
@@ -111,18 +112,18 @@ class test_AMQPBackend(unittest.TestCase):
             Producer = _Producer
 
         backend = Backend()
-        self.assertRaises(KeyError, backend.store_result,
-                          "foo", "bar", "STARTED", max_retries=None)
+        with self.assertRaises(KeyError):
+            backend.store_result("foo", "bar", "STARTED", max_retries=None)
 
-        self.assertRaises(KeyError, backend.store_result,
-                          "foo", "bar", "STARTED", max_retries=10)
+        with self.assertRaises(KeyError):
+            backend.store_result("foo", "bar", "STARTED", max_retries=10)
 
     def assertState(self, retval, state):
         self.assertEqual(retval["status"], state)
 
     def test_poll_no_messages(self):
         b = self.create_backend()
-        self.assertState(b.poll(gen_unique_id()), states.PENDING)
+        self.assertState(b.poll(uuid()), states.PENDING)
 
     def test_poll_result(self):
 
@@ -160,41 +161,45 @@ class test_AMQPBackend(unittest.TestCase):
         results.put(Message(status=states.RECEIVED, seq=1))
         results.put(Message(status=states.STARTED, seq=2))
         results.put(Message(status=states.FAILURE, seq=3))
-        r1 = backend.poll(gen_unique_id())
+        r1 = backend.poll(uuid())
         self.assertDictContainsSubset({"status": states.FAILURE,
                                        "seq": 3}, r1,
                                        "FFWDs to the last state")
 
         # Caches last known state.
         results.put(Message())
-        uuid = gen_unique_id()
-        backend.poll(uuid)
-        self.assertIn(uuid, backend._cache, "Caches last known state")
+        tid = uuid()
+        backend.poll(tid)
+        self.assertIn(tid, backend._cache, "Caches last known state")
 
         # Returns cache if no new states.
         results.queue.clear()
         assert not results.qsize()
-        backend._cache[uuid] = "hello"
-        self.assertEqual(backend.poll(uuid), "hello",
+        backend._cache[tid] = "hello"
+        self.assertEqual(backend.poll(tid), "hello",
                          "Returns cache if no new states")
 
     def test_wait_for(self):
         b = self.create_backend()
 
-        uuid = gen_unique_id()
-        self.assertRaises(TimeoutError, b.wait_for, uuid, timeout=0.1)
-        b.store_result(uuid, None, states.STARTED)
-        self.assertRaises(TimeoutError, b.wait_for, uuid, timeout=0.1)
-        b.store_result(uuid, None, states.RETRY)
-        self.assertRaises(TimeoutError, b.wait_for, uuid, timeout=0.1)
-        b.store_result(uuid, 42, states.SUCCESS)
-        self.assertEqual(b.wait_for(uuid, timeout=1), 42)
-        b.store_result(uuid, 56, states.SUCCESS)
-        self.assertEqual(b.wait_for(uuid, timeout=1), 42,
+        tid = uuid()
+        with self.assertRaises(TimeoutError):
+            b.wait_for(tid, timeout=0.1)
+        b.store_result(tid, None, states.STARTED)
+        with self.assertRaises(TimeoutError):
+            b.wait_for(tid, timeout=0.1)
+        b.store_result(tid, None, states.RETRY)
+        with self.assertRaises(TimeoutError):
+            b.wait_for(tid, timeout=0.1)
+        b.store_result(tid, 42, states.SUCCESS)
+        self.assertEqual(b.wait_for(tid, timeout=1), 42)
+        b.store_result(tid, 56, states.SUCCESS)
+        self.assertEqual(b.wait_for(tid, timeout=1), 42,
                          "result is cached")
-        self.assertEqual(b.wait_for(uuid, timeout=1, cache=False), 56)
-        b.store_result(uuid, KeyError("foo"), states.FAILURE)
-        self.assertRaises(KeyError, b.wait_for, uuid, timeout=1, cache=False)
+        self.assertEqual(b.wait_for(tid, timeout=1, cache=False), 56)
+        b.store_result(tid, KeyError("foo"), states.FAILURE)
+        with self.assertRaises(KeyError):
+            b.wait_for(tid, timeout=1, cache=False)
 
     def test_drain_events_remaining_timeouts(self):
 
@@ -205,33 +210,33 @@ class test_AMQPBackend(unittest.TestCase):
 
         b = self.create_backend()
         with current_app.pool.acquire_channel(block=False) as (_, channel):
-            binding = b._create_binding(gen_unique_id())
+            binding = b._create_binding(uuid())
             consumer = b._create_consumer(binding, channel)
-            self.assertRaises(socket.timeout, b.drain_events,
-                              Connection(), consumer, timeout=0.1)
+            with self.assertRaises(socket.timeout):
+                b.drain_events(Connection(), consumer, timeout=0.1)
 
     def test_get_many(self):
         b = self.create_backend()
 
-        uuids = []
+        tids = []
         for i in xrange(10):
-            uuid = gen_unique_id()
-            b.store_result(uuid, i, states.SUCCESS)
-            uuids.append(uuid)
+            tid = uuid()
+            b.store_result(tid, i, states.SUCCESS)
+            tids.append(tid)
 
-        res = list(b.get_many(uuids, timeout=1))
-        expected_results = [(uuid, {"status": states.SUCCESS,
+        res = list(b.get_many(tids, timeout=1))
+        expected_results = [(tid, {"status": states.SUCCESS,
                                     "result": i,
                                     "traceback": None,
-                                    "task_id": uuid})
-                                for i, uuid in enumerate(uuids)]
+                                    "task_id": tid})
+                                for i, tid in enumerate(tids)]
         self.assertEqual(sorted(res), sorted(expected_results))
         self.assertDictEqual(b._cache[res[0][0]], res[0][1])
-        cached_res = list(b.get_many(uuids, timeout=1))
+        cached_res = list(b.get_many(tids, timeout=1))
         self.assertEqual(sorted(cached_res), sorted(expected_results))
         b._cache[res[0][0]]["status"] = states.RETRY
-        self.assertRaises(socket.timeout, list,
-                          b.get_many(uuids, timeout=0.01))
+        with self.assertRaises(socket.timeout):
+            list(b.get_many(tids, timeout=0.01))
 
     def test_test_get_many_raises_outer_block(self):
 
@@ -241,7 +246,8 @@ class test_AMQPBackend(unittest.TestCase):
                 raise KeyError("foo")
 
         b = Backend()
-        self.assertRaises(KeyError, b.get_many(["id1"]).next)
+        with self.assertRaises(KeyError):
+            b.get_many(["id1"]).next()
 
     def test_test_get_many_raises_inner_block(self):
 
@@ -251,7 +257,8 @@ class test_AMQPBackend(unittest.TestCase):
                 raise KeyError("foo")
 
         b = Backend()
-        self.assertRaises(KeyError, b.get_many(["id1"]).next)
+        with self.assertRaises(KeyError):
+            b.get_many(["id1"]).next()
 
     def test_no_expires(self):
         b = self.create_backend(expires=None)
@@ -260,8 +267,8 @@ class test_AMQPBackend(unittest.TestCase):
         app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = None
         try:
             b = self.create_backend(expires=None)
-            self.assertRaises(KeyError, b.queue_arguments.__getitem__,
-                              "x-expires")
+            with self.assertRaises(KeyError):
+                b.queue_arguments["x-expires"]
         finally:
             app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = prev
 
@@ -269,21 +276,21 @@ class test_AMQPBackend(unittest.TestCase):
         self.create_backend().process_cleanup()
 
     def test_reload_task_result(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().reload_task_result, "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().reload_task_result("x")
 
     def test_reload_taskset_result(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().reload_taskset_result, "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().reload_taskset_result("x")
 
     def test_save_taskset(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().save_taskset, "x", "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().save_taskset("x", "x")
 
     def test_restore_taskset(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().restore_taskset, "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().restore_taskset("x")
 
     def test_delete_taskset(self):
-        self.assertRaises(NotImplementedError,
-                          self.create_backend().delete_taskset, "x")
+        with self.assertRaises(NotImplementedError):
+            self.create_backend().delete_taskset("x")

+ 46 - 40
celery/tests/test_backends/test_base.py

@@ -1,9 +1,11 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
 import types
 
 from mock import Mock
+from nose import SkipTest
 
 from celery.utils import serialization
 from celery.utils.serialization import subclass_exception
@@ -15,7 +17,7 @@ from celery.utils.serialization import get_pickleable_exception as gpe
 from celery import states
 from celery.backends.base import BaseBackend, KeyValueStoreBackend
 from celery.backends.base import BaseDictBackend, DisabledBackend
-from celery.utils import gen_unique_id
+from celery.utils import uuid
 
 from celery.tests.utils import unittest
 
@@ -25,8 +27,10 @@ class wrapobject(object):
     def __init__(self, *args, **kwargs):
         self.args = args
 
-
-Oldstyle = types.ClassType("Oldstyle", (), {})
+if sys.version_info >= (3, 0):
+    Oldstyle = None
+else:
+    Oldstyle = types.ClassType("Oldstyle", (), {})
 Unpickleable = subclass_exception("Unpickleable", KeyError, "foo.module")
 Impossible = subclass_exception("Impossible", object, "foo.module")
 Lookalike = subclass_exception("Lookalike", wrapobject, "foo.module")
@@ -45,52 +49,52 @@ class test_serialization(unittest.TestCase):
 class test_BaseBackend_interface(unittest.TestCase):
 
     def test_get_status(self):
-        self.assertRaises(NotImplementedError,
-                b.get_status, "SOMExx-N0Nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.get_status("SOMExx-N0Nex1stant-IDxx-")
 
     def test__forget(self):
-        self.assertRaises(NotImplementedError,
-                b.forget, "SOMExx-N0Nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.forget("SOMExx-N0Nex1stant-IDxx-")
 
     def test_store_result(self):
-        self.assertRaises(NotImplementedError,
-                b.store_result, "SOMExx-N0nex1stant-IDxx-", 42, states.SUCCESS)
+        with self.assertRaises(NotImplementedError):
+            b.store_result("SOMExx-N0nex1stant-IDxx-", 42, states.SUCCESS)
 
     def test_mark_as_started(self):
-        self.assertRaises(NotImplementedError,
-                b.mark_as_started, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.mark_as_started("SOMExx-N0nex1stant-IDxx-")
 
     def test_reload_task_result(self):
-        self.assertRaises(NotImplementedError,
-                b.reload_task_result, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.reload_task_result("SOMExx-N0nex1stant-IDxx-")
 
     def test_reload_taskset_result(self):
-        self.assertRaises(NotImplementedError,
-                b.reload_taskset_result, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.reload_taskset_result("SOMExx-N0nex1stant-IDxx-")
 
     def test_get_result(self):
-        self.assertRaises(NotImplementedError,
-                b.get_result, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.get_result("SOMExx-N0nex1stant-IDxx-")
 
     def test_restore_taskset(self):
-        self.assertRaises(NotImplementedError,
-                b.restore_taskset, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.restore_taskset("SOMExx-N0nex1stant-IDxx-")
 
     def test_delete_taskset(self):
-        self.assertRaises(NotImplementedError,
-                b.delete_taskset, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.delete_taskset("SOMExx-N0nex1stant-IDxx-")
 
     def test_save_taskset(self):
-        self.assertRaises(NotImplementedError,
-                b.save_taskset, "SOMExx-N0nex1stant-IDxx-", "blergh")
+        with self.assertRaises(NotImplementedError):
+            b.save_taskset("SOMExx-N0nex1stant-IDxx-", "blergh")
 
     def test_get_traceback(self):
-        self.assertRaises(NotImplementedError,
-                b.get_traceback, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.get_traceback("SOMExx-N0nex1stant-IDxx-")
 
     def test_forget(self):
-        self.assertRaises(NotImplementedError,
-                b.forget, "SOMExx-N0nex1stant-IDxx-")
+        with self.assertRaises(NotImplementedError):
+            b.forget("SOMExx-N0nex1stant-IDxx-")
 
     def test_on_chord_apply(self, unlock="celery.chord_unlock"):
         from celery.registry import tasks
@@ -105,6 +109,8 @@ class test_BaseBackend_interface(unittest.TestCase):
 class test_exception_pickle(unittest.TestCase):
 
     def test_oldstyle(self):
+        if Oldstyle is None:
+            raise SkipTest("py3k does not support old style classes")
         self.assertIsNone(fnpe(Oldstyle()))
 
     def test_BaseException(self):
@@ -230,7 +236,7 @@ class test_KeyValueStoreBackend(unittest.TestCase):
         self.b = KVBackend()
 
     def test_get_store_delete_result(self):
-        tid = gen_unique_id()
+        tid = uuid()
         self.b.mark_as_done(tid, "Hello world")
         self.assertEqual(self.b.get_result(tid), "Hello world")
         self.assertEqual(self.b.get_status(tid), states.SUCCESS)
@@ -245,7 +251,7 @@ class test_KeyValueStoreBackend(unittest.TestCase):
     def test_get_many(self):
         for is_dict in True, False:
             self.b.mget_returns_dict = is_dict
-            ids = dict((gen_unique_id(), i) for i in xrange(10))
+            ids = dict((uuid(), i) for i in xrange(10))
             for id, i in ids.items():
                 self.b.mark_as_done(id, i)
             it = self.b.get_many(ids.keys())
@@ -259,7 +265,7 @@ class test_KeyValueStoreBackend(unittest.TestCase):
         self.assertEqual(self.b.get_status("xxx-missing"), states.PENDING)
 
     def test_save_restore_delete_taskset(self):
-        tid = gen_unique_id()
+        tid = uuid()
         self.b.save_taskset(tid, "Hello world")
         self.assertEqual(self.b.restore_taskset(tid), "Hello world")
         self.b.delete_taskset(tid)
@@ -272,27 +278,27 @@ class test_KeyValueStoreBackend(unittest.TestCase):
 class test_KeyValueStoreBackend_interface(unittest.TestCase):
 
     def test_get(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().get,
-                "a")
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().get("a")
 
     def test_set(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().set,
-                "a", 1)
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().set("a", 1)
 
     def test_cleanup(self):
         self.assertFalse(KeyValueStoreBackend().cleanup())
 
     def test_delete(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().delete,
-                "a")
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().delete("a")
 
     def test_mget(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().mget,
-                ["a"])
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().mget(["a"])
 
     def test_forget(self):
-        self.assertRaises(NotImplementedError, KeyValueStoreBackend().forget,
-                "a")
+        with self.assertRaises(NotImplementedError):
+            KeyValueStoreBackend().forget("a")
 
 
 class test_DisabledBackend(unittest.TestCase):
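
Note: the Oldstyle hunk above is a Python 3 guard. types.ClassType does not exist on py3k, so evaluating it at module level would break the whole test module at import time; gating the definition and skipping at run time keeps test collection working everywhere. The shape of that pattern, as a self-contained sketch:

    import sys
    import types

    from nose import SkipTest

    # Decide once at import time; touching types.ClassType on Python 3
    # would raise AttributeError before any test could run.
    if sys.version_info >= (3, 0):
        Oldstyle = None
    else:
        Oldstyle = types.ClassType("Oldstyle", (), {})

    def require_oldstyle():
        # Individual tests opt out at run time instead of failing.
        if Oldstyle is None:
            raise SkipTest("py3k does not support old style classes")
        return Oldstyle()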

+ 74 - 14
celery/tests/test_backends/test_cache.py

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
@@ -9,7 +10,8 @@ from celery import states
 from celery.backends.cache import CacheBackend, DummyClient
 from celery.exceptions import ImproperlyConfigured
 from celery.result import AsyncResult
-from celery.utils import gen_unique_id
+from celery.utils import uuid
+from celery.utils.encoding import str_to_bytes
 
 from celery.tests.utils import unittest, mask_modules, reset_modules
 
@@ -25,7 +27,7 @@ class test_CacheBackend(unittest.TestCase):
     def test_mark_as_done(self):
         tb = CacheBackend(backend="memory://")
 
-        tid = gen_unique_id()
+        tid = uuid()
 
         self.assertEqual(tb.get_status(tid), states.PENDING)
         self.assertIsNone(tb.get_result(tid))
@@ -37,7 +39,7 @@ class test_CacheBackend(unittest.TestCase):
     def test_is_pickled(self):
         tb = CacheBackend(backend="memory://")
 
-        tid2 = gen_unique_id()
+        tid2 = uuid()
         result = {"foo": "baz", "bar": SomeClass(12345)}
         tb.mark_as_done(tid2, result)
         # is serialized properly.
@@ -48,14 +50,14 @@ class test_CacheBackend(unittest.TestCase):
     def test_mark_as_failure(self):
         tb = CacheBackend(backend="memory://")
 
-        tid3 = gen_unique_id()
+        tid3 = uuid()
         try:
             raise KeyError("foo")
         except KeyError, exception:
             pass
-        tb.mark_as_failure(tid3, exception)
-        self.assertEqual(tb.get_status(tid3), states.FAILURE)
-        self.assertIsInstance(tb.get_result(tid3), KeyError)
+            tb.mark_as_failure(tid3, exception)
+            self.assertEqual(tb.get_status(tid3), states.FAILURE)
+            self.assertIsInstance(tb.get_result(tid3), KeyError)
 
     def test_mget(self):
         tb = CacheBackend(backend="memory://")
@@ -67,7 +69,7 @@ class test_CacheBackend(unittest.TestCase):
 
     def test_forget(self):
         tb = CacheBackend(backend="memory://")
-        tid = gen_unique_id()
+        tid = uuid()
         tb.mark_as_done(tid, {"foo": "bar"})
         x = AsyncResult(tid, backend=tb)
         x.forget()
@@ -82,20 +84,30 @@ class test_CacheBackend(unittest.TestCase):
         self.assertEqual(tb.expires, 10)
 
     def test_unknown_backend_raises_ImproperlyConfigured(self):
-        self.assertRaises(ImproperlyConfigured,
-                          CacheBackend, backend="unknown://")
+        with self.assertRaises(ImproperlyConfigured):
+            CacheBackend(backend="unknown://")
 
 
-class MyClient(DummyClient):
+class MyMemcachedStringEncodingError(Exception):
     pass
 
 
-class test_get_best_memcache(unittest.TestCase):
+class MemcachedClient(DummyClient):
+
+    def set(self, key, value, *args, **kwargs):
+        if isinstance(key, unicode):
+            raise MyMemcachedStringEncodingError(
+                    "Keys must be str()'s, not unicode.  Convert your unicode "
+                    "strings using mystring.encode(charset)!")
+        return super(MemcachedClient, self).set(key, value, *args, **kwargs)
+
+
+class MockCacheMixin(object):
 
     @contextmanager
     def mock_memcache(self):
         memcache = types.ModuleType("memcache")
-        memcache.Client = MyClient
+        memcache.Client = MemcachedClient
         memcache.Client.__module__ = memcache.__name__
         prev, sys.modules["memcache"] = sys.modules.get("memcache"), memcache
         yield True
@@ -105,7 +117,7 @@ class test_get_best_memcache(unittest.TestCase):
     @contextmanager
     def mock_pylibmc(self):
         pylibmc = types.ModuleType("pylibmc")
-        pylibmc.Client = MyClient
+        pylibmc.Client = MemcachedClient
         pylibmc.Client.__module__ = pylibmc.__name__
         prev = sys.modules.get("pylibmc")
         sys.modules["pylibmc"] = pylibmc
@@ -113,6 +125,9 @@ class test_get_best_memcache(unittest.TestCase):
         if prev is not None:
             sys.modules["pylibmc"] = prev
 
+
+class test_get_best_memcache(unittest.TestCase, MockCacheMixin):
+
     def test_pylibmc(self):
         with reset_modules("celery.backends.cache"):
             with self.mock_pylibmc():
@@ -150,3 +165,48 @@ class test_get_best_memcache(unittest.TestCase):
         from celery.backends.cache import backends
         for name, fun in backends.items():
             self.assertTrue(fun())
+
+
+class test_memcache_key(unittest.TestCase, MockCacheMixin):
+
+    def test_memcache_unicode_key(self):
+        with self.mock_memcache():
+            with reset_modules("celery.backends.cache"):
+                with mask_modules("pylibmc"):
+                    from celery.backends import cache
+                    cache._imp = [None]
+                    task_id, result = unicode(uuid()), 42
+                    b = cache.CacheBackend(backend='memcache')
+                    b.store_result(task_id, result, status=states.SUCCESS)
+                    self.assertEqual(b.get_result(task_id), result)
+
+    def test_memcache_bytes_key(self):
+        with self.mock_memcache():
+            with reset_modules("celery.backends.cache"):
+                with mask_modules("pylibmc"):
+                    from celery.backends import cache
+                    cache._imp = [None]
+                    task_id, result = str_to_bytes(uuid()), 42
+                    b = cache.CacheBackend(backend='memcache')
+                    b.store_result(task_id, result, status=states.SUCCESS)
+                    self.assertEqual(b.get_result(task_id), result)
+
+    def test_pylibmc_unicode_key(self):
+        with reset_modules("celery.backends.cache"):
+            with self.mock_pylibmc():
+                from celery.backends import cache
+                cache._imp = [None]
+                task_id, result = unicode(uuid()), 42
+                b = cache.CacheBackend(backend='memcache')
+                b.store_result(task_id, result, status=states.SUCCESS)
+                self.assertEqual(b.get_result(task_id), result)
+
+    def test_pylibmc_bytes_key(self):
+        with reset_modules("celery.backends.cache"):
+            with self.mock_pylibmc():
+                from celery.backends import cache
+                cache._imp = [None]
+                task_id, result = str_to_bytes(uuid()), 42
+                b = cache.CacheBackend(backend='memcache')
+                b.store_result(task_id, result, status=states.SUCCESS)
+                self.assertEqual(b.get_result(task_id), result)
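
Note: the four new tests pin down a real-world failure: python-memcached rejects unicode keys, which is exactly what the MemcachedClient mock simulates. The backend therefore has to normalize keys before the client sees them; a hypothetical normalizer follows (the actual fix lives in celery/backends/cache.py and is not shown in this diff).

    def ensure_bytes_key(key, charset="utf-8"):
        """Encode unicode cache keys so memcached clients accept them."""
        # `unicode` is the Python 2 text type, matching the code above.
        if isinstance(key, unicode):
            return key.encode(charset)
        return key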

+ 16 - 13
celery/tests/test_backends/test_database.py

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from __future__ import with_statement
 
 import sys
@@ -10,7 +11,7 @@ from celery import states
 from celery.app import app_or_default
 from celery.exceptions import ImproperlyConfigured
 from celery.result import AsyncResult
-from celery.utils import gen_unique_id
+from celery.utils import uuid
 
 from celery.tests.utils import mask_modules
 from celery.tests.utils import unittest
@@ -43,7 +44,8 @@ class test_DatabaseBackend(unittest.TestCase):
     def test_missing_SQLAlchemy_raises_ImproperlyConfigured(self):
         with mask_modules("sqlalchemy"):
             from celery.backends.database import _sqlalchemy_installed
-            self.assertRaises(ImproperlyConfigured, _sqlalchemy_installed)
+            with self.assertRaises(ImproperlyConfigured):
+                _sqlalchemy_installed()
 
     def test_pickle_hack_for_sqla_05(self):
         import sqlalchemy as sa
@@ -66,7 +68,8 @@ class test_DatabaseBackend(unittest.TestCase):
         conf = app_or_default().conf
         prev, conf.CELERY_RESULT_DBURI = conf.CELERY_RESULT_DBURI, None
         try:
-            self.assertRaises(ImproperlyConfigured, DatabaseBackend)
+            with self.assertRaises(ImproperlyConfigured):
+                DatabaseBackend()
         finally:
             conf.CELERY_RESULT_DBURI = prev
 
@@ -86,7 +89,7 @@ class test_DatabaseBackend(unittest.TestCase):
     def test_mark_as_done(self):
         tb = DatabaseBackend()
 
-        tid = gen_unique_id()
+        tid = uuid()
 
         self.assertEqual(tb.get_status(tid), states.PENDING)
         self.assertIsNone(tb.get_result(tid))
@@ -98,7 +101,7 @@ class test_DatabaseBackend(unittest.TestCase):
     def test_is_pickled(self):
         tb = DatabaseBackend()
 
-        tid2 = gen_unique_id()
+        tid2 = uuid()
         result = {"foo": "baz", "bar": SomeClass(12345)}
         tb.mark_as_done(tid2, result)
         # is serialized properly.
@@ -108,19 +111,19 @@ class test_DatabaseBackend(unittest.TestCase):
 
     def test_mark_as_started(self):
         tb = DatabaseBackend()
-        tid = gen_unique_id()
+        tid = uuid()
         tb.mark_as_started(tid)
         self.assertEqual(tb.get_status(tid), states.STARTED)
 
     def test_mark_as_revoked(self):
         tb = DatabaseBackend()
-        tid = gen_unique_id()
+        tid = uuid()
         tb.mark_as_revoked(tid)
         self.assertEqual(tb.get_status(tid), states.REVOKED)
 
     def test_mark_as_retry(self):
         tb = DatabaseBackend()
-        tid = gen_unique_id()
+        tid = uuid()
         try:
             raise KeyError("foo")
         except KeyError, exception:
@@ -134,7 +137,7 @@ class test_DatabaseBackend(unittest.TestCase):
     def test_mark_as_failure(self):
         tb = DatabaseBackend()
 
-        tid3 = gen_unique_id()
+        tid3 = uuid()
         try:
             raise KeyError("foo")
         except KeyError, exception:
@@ -147,7 +150,7 @@ class test_DatabaseBackend(unittest.TestCase):
 
     def test_forget(self):
         tb = DatabaseBackend(backend="memory://")
-        tid = gen_unique_id()
+        tid = uuid()
         tb.mark_as_done(tid, {"foo": "bar"})
         x = AsyncResult(tid)
         x.forget()
@@ -160,7 +163,7 @@ class test_DatabaseBackend(unittest.TestCase):
     def test_save__restore__delete_taskset(self):
         tb = DatabaseBackend()
 
-        tid = gen_unique_id()
+        tid = uuid()
         res = {u"something": "special"}
         self.assertEqual(tb.save_taskset(tid, res), res)
 
@@ -175,8 +178,8 @@ class test_DatabaseBackend(unittest.TestCase):
     def test_cleanup(self):
         tb = DatabaseBackend()
         for i in range(10):
-            tb.mark_as_done(gen_unique_id(), 42)
-            tb.save_taskset(gen_unique_id(), {"foo": "bar"})
+            tb.mark_as_done(uuid(), 42)
+            tb.save_taskset(uuid(), {"foo": "bar"})
         s = tb.ResultSession()
         for t in s.query(Task).all():
             t.date_done = datetime.now() - tb.expires * 2
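
Note: test_cleanup uses the standard expiry-test trick: store some rows, rewrite their date_done to well past the expiry horizon, then assert that cleanup() removes them. A toy in-memory analogue of the logic being exercised; illustrative only, the real backend does this with a SQLAlchemy query.

    from datetime import datetime, timedelta

    class ResultStore(object):
        """Dict-backed stand-in for the database result store."""

        def __init__(self, expires=timedelta(days=1)):
            self.expires = expires
            self.rows = {}                  # task_id -> (result, date_done)

        def mark_as_done(self, task_id, result, now=None):
            self.rows[task_id] = (result, now or datetime.now())

        def cleanup(self, now=None):
            cutoff = (now or datetime.now()) - self.expires
            for task_id, (_, date_done) in list(self.rows.items()):
                if date_done < cutoff:      # older than the horizon
                    del self.rows[task_id]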

+ 2 - 0
celery/tests/test_backends/test_pyredis_compat.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 from nose import SkipTest
 
 from celery.exceptions import ImproperlyConfigured

+ 7 - 7
celery/tests/test_backends/test_redis.py

@@ -3,18 +3,17 @@ from __future__ import with_statement
 
 import sys
 import socket
-from celery.tests.utils import unittest
 
 from nose import SkipTest
 
 from celery.exceptions import ImproperlyConfigured
 
 from celery import states
-from celery.utils import gen_unique_id
+from celery.utils import uuid
 from celery.backends import redis
 from celery.backends.redis import RedisBackend
-
 from celery.tests.utils import mask_modules
+from celery.tests.utils import unittest
 
 _no_redis_msg = "* Redis %s. Will not execute related tests."
 _no_redis_msg_emitted = False
@@ -63,7 +62,7 @@ class TestRedisBackend(unittest.TestCase):
     def test_mark_as_done(self):
         tb = get_redis_or_SkipTest()
 
-        tid = gen_unique_id()
+        tid = uuid()
 
         self.assertEqual(tb.get_status(tid), states.PENDING)
         self.assertIsNone(tb.get_result(tid))
@@ -75,7 +74,7 @@ class TestRedisBackend(unittest.TestCase):
     def test_is_pickled(self):
         tb = get_redis_or_SkipTest()
 
-        tid2 = gen_unique_id()
+        tid2 = uuid()
         result = {"foo": "baz", "bar": SomeClass(12345)}
         tb.mark_as_done(tid2, result)
         # is serialized properly.
@@ -86,7 +85,7 @@ class TestRedisBackend(unittest.TestCase):
     def test_mark_as_failure(self):
         tb = get_redis_or_SkipTest()
 
-        tid3 = gen_unique_id()
+        tid3 = uuid()
         try:
             raise KeyError("foo")
         except KeyError, exception:
@@ -112,6 +111,7 @@ class TestRedisBackendNoRedis(unittest.TestCase):
         prev = redis.RedisBackend.redis
         redis.RedisBackend.redis = None
         try:
-            self.assertRaises(ImproperlyConfigured, redis.RedisBackend)
+            with self.assertRaises(ImproperlyConfigured):
+                redis.RedisBackend()
         finally:
             redis.RedisBackend.redis = prev

+ 12 - 11
celery/tests/test_backends/test_redis_unit.py

@@ -1,11 +1,12 @@
+from __future__ import absolute_import
+
 from datetime import timedelta
 
 from mock import Mock, patch
-from kombu.utils import cached_property
 
 from celery import current_app
 from celery import states
-from celery.utils import gen_unique_id
+from celery.utils import cached_property, uuid
 from celery.utils.timeutils import timedelta_seconds
 
 from celery.tests.utils import unittest
@@ -134,16 +135,16 @@ class test_RedisBackend(unittest.TestCase):
 
     def test_get_set_forget(self):
         b = self.Backend()
-        uuid = gen_unique_id()
-        b.store_result(uuid, 42, states.SUCCESS)
-        self.assertEqual(b.get_status(uuid), states.SUCCESS)
-        self.assertEqual(b.get_result(uuid), 42)
-        b.forget(uuid)
-        self.assertEqual(b.get_status(uuid), states.PENDING)
+        tid = uuid()
+        b.store_result(tid, 42, states.SUCCESS)
+        self.assertEqual(b.get_status(tid), states.SUCCESS)
+        self.assertEqual(b.get_result(tid), 42)
+        b.forget(tid)
+        self.assertEqual(b.get_status(tid), states.PENDING)
 
     def test_set_expires(self):
         b = self.Backend(expires=512)
-        uuid = gen_unique_id()
-        key = b.get_key_for_task(uuid)
-        b.store_result(uuid, 42, states.SUCCESS)
+        tid = uuid()
+        key = b.get_key_for_task(tid)
+        b.store_result(tid, 42, states.SUCCESS)
         self.assertEqual(b.client.expiry[key], 512)
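
Note: cached_property is now imported from celery.utils instead of kombu.utils. The descriptor itself is small; a minimal sketch follows, though the real one likely also supports setters and deleters.

    class cached_property(object):
        """Compute a value on first access, then cache it on the instance."""

        def __init__(self, fget):
            self.fget = fget
            self.__name__ = fget.__name__
            self.__doc__ = fget.__doc__

        def __get__(self, obj, type=None):
            if obj is None:                 # accessed on the class itself
                return self
            # Storing under the same name shadows this (non-data) descriptor,
            # so later lookups hit the instance dict directly.
            value = obj.__dict__[self.__name__] = self.fget(obj)
            return value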

Some files were not shown because too many files changed in this diff