Browse Source

Merge branch 'master' of github.com:celery/celery

Mher Movsisyan 12 years ago
parent
commit
af0492cb30
100 changed files with 701 additions and 924 deletions
  1. 0 1
      .travis.yml
  2. 78 2
      Changelog
  3. 2 0
      celery/__compat__.py
  4. 3 1
      celery/__init__.py
  5. 0 1
      celery/app/__init__.py
  6. 5 7
      celery/app/amqp.py
  7. 3 1
      celery/app/annotations.py
  8. 23 19
      celery/app/base.py
  9. 8 9
      celery/app/builtins.py
  10. 3 3
      celery/app/control.py
  11. 4 11
      celery/app/defaults.py
  12. 9 7
      celery/app/log.py
  13. 2 2
      celery/app/routes.py
  14. 7 9
      celery/app/task.py
  15. 23 23
      celery/app/utils.py
  16. 18 18
      celery/apps/beat.py
  17. 38 37
      celery/apps/worker.py
  18. 3 3
      celery/backends/__init__.py
  19. 2 11
      celery/backends/amqp.py
  20. 4 5
      celery/backends/base.py
  21. 4 4
      celery/backends/cache.py
  22. 0 71
      celery/backends/database/a805d4bd.py
  23. 0 50
      celery/backends/database/dfd042c7.py
  24. 3 8
      celery/backends/database/models.py
  25. 2 2
      celery/backends/redis.py
  26. 13 13
      celery/beat.py
  27. 12 10
      celery/bin/base.py
  28. 18 19
      celery/bin/camqadm.py
  29. 76 72
      celery/bin/celery.py
  30. 1 2
      celery/bin/celerybeat.py
  31. 2 2
      celery/bin/celeryd.py
  32. 5 6
      celery/bin/celeryd_detach.py
  33. 33 33
      celery/bin/celeryd_multi.py
  34. 2 3
      celery/bin/celeryev.py
  35. 8 6
      celery/canvas.py
  36. 2 2
      celery/concurrency/base.py
  37. 3 3
      celery/contrib/batches.py
  38. 15 11
      celery/contrib/migrate.py
  39. 30 15
      celery/contrib/rdb.py
  40. 13 11
      celery/datastructures.py
  41. 13 21
      celery/events/__init__.py
  42. 23 20
      celery/events/cursesmon.py
  43. 9 12
      celery/events/dumper.py
  44. 2 3
      celery/events/snapshot.py
  45. 10 8
      celery/events/state.py
  46. 6 6
      celery/exceptions.py
  47. 6 4
      celery/loaders/base.py
  48. 9 9
      celery/loaders/default.py
  49. 4 6
      celery/local.py
  50. 27 34
      celery/platforms.py
  51. 21 18
      celery/result.py
  52. 17 13
      celery/schedules.py
  53. 0 1
      celery/security/__init__.py
  54. 6 7
      celery/security/certificate.py
  55. 2 3
      celery/security/key.py
  56. 3 3
      celery/security/serialization.py
  57. 3 3
      celery/security/utils.py
  58. 6 9
      celery/task/base.py
  59. 7 7
      celery/task/http.py
  60. 6 7
      celery/task/sets.py
  61. 7 17
      celery/task/trace.py
  62. 1 3
      celery/tests/__init__.py
  63. 0 1
      celery/tests/app/test_amqp.py
  64. 0 4
      celery/tests/app/test_app.py
  65. 5 6
      celery/tests/app/test_beat.py
  66. 0 1
      celery/tests/app/test_control.py
  67. 1 2
      celery/tests/app/test_defaults.py
  68. 2 3
      celery/tests/app/test_loaders.py
  69. 0 1
      celery/tests/app/test_log.py
  70. 1 2
      celery/tests/app/test_routes.py
  71. 6 17
      celery/tests/backends/test_amqp.py
  72. 0 1
      celery/tests/backends/test_backends.py
  73. 0 1
      celery/tests/backends/test_base.py
  74. 1 2
      celery/tests/backends/test_cache.py
  75. 0 1
      celery/tests/backends/test_cassandra.py
  76. 2 22
      celery/tests/backends/test_database.py
  77. 0 1
      celery/tests/backends/test_mongodb.py
  78. 0 1
      celery/tests/backends/test_redis.py
  79. 0 2
      celery/tests/bin/test_base.py
  80. 0 1
      celery/tests/bin/test_camqadm.py
  81. 0 3
      celery/tests/bin/test_celery.py
  82. 0 1
      celery/tests/bin/test_celerybeat.py
  83. 3 7
      celery/tests/bin/test_celeryd.py
  84. 0 1
      celery/tests/bin/test_celeryd_detach.py
  85. 1 2
      celery/tests/bin/test_celeryd_multi.py
  86. 0 1
      celery/tests/bin/test_celeryev.py
  87. 0 85
      celery/tests/compat.py
  88. 3 3
      celery/tests/compat_modules/test_decorators.py
  89. 0 1
      celery/tests/concurrency/test_concurrency.py
  90. 0 1
      celery/tests/concurrency/test_eventlet.py
  91. 0 1
      celery/tests/concurrency/test_gevent.py
  92. 0 1
      celery/tests/concurrency/test_processes.py
  93. 0 1
      celery/tests/concurrency/test_threads.py
  94. 0 1
      celery/tests/contrib/test_migrate.py
  95. 0 1
      celery/tests/contrib/test_rdb.py
  96. 2 6
      celery/tests/events/test_events.py
  97. 0 1
      celery/tests/events/test_snapshot.py
  98. 8 8
      celery/tests/events/test_state.py
  99. 1 2
      celery/tests/security/test_certificate.py
  100. 0 1
      celery/tests/security/test_security.py

+ 0 - 1
.travis.yml

@@ -1,6 +1,5 @@
 language: python
 python:
-    - 2.5
     - 2.6
     - 2.7
 install:

+ 78 - 2
Changelog

@@ -5,14 +5,90 @@
 .. contents::
     :local:
 
-.. _version-3.0.2:
+.. _version-3.1.0:
 
-3.0.2
+3.1.0
 =====
+:state: DEVEL
+:branch: master
 
 - `Task.apply_async` now supports timeout and soft_timeout arguments (Issue #802)
 - `App.control.Inspect.conf` can be used for inspecting worker configuration
 
+.. _version-3.0.3:
+
+3.0.3
+=====
+:release-date: 2012-07-20 09:17 P.M BST
+:by: Ask Solem
+
+- amqplib passes the channel object as part of the delivery_info
+  and it's not pickleable, so we now remove it.
+
+.. _version-3.0.2:
+
+3.0.2
+=====
+:release-date: 2012-07-20 04:00 P.M BST
+:by: Ask Solem
+
+- A bug caused the following task options to not take defaults from the
+  configuration (Issue #867 + Issue #858)
+
+    The following settings were affected:
+
+    - :setting:`CELERY_IGNORE_RESULT`
+    - :setting:`CELERYD_SEND_TASK_ERROR_EMAILS`
+    - :setting:`CELERY_TRACK_STARTED`
+    - :setting:`CELERY_STORE_ERRORS_EVEN_IF_IGNORED`
+
+    Fix contributed by John Watson.
+
+- Task Request: ``delivery_info`` is now passed through as-is (Issue #807).
+
+- The eta argument now supports datetimes with a timezone set (Issue #855).
+
+- The worker's banner displayed the autoscale settings in the wrong order
+  (Issue #859).
+
+- Extension commands are now loaded after concurrency is set up
+  so that they don't interfere with e.g. eventlet patching.
+
+- Fixed bug in the threaded pool (Issue #863)
+
+- The task failure handler mixed up the fields in :func:`sys.exc_info`.
+
+    Fix contributed by Rinat Shigapov.
+
+- Fixed typos and wording in the docs.
+
+    Fix contributed by Paul McMillan
+
+- New setting: :setting:`CELERY_WORKER_DIRECT`
+
+    If enabled each worker will consume from their own dedicated queue
+    which can be used to route tasks to specific workers.
+
+- Fixed several edge case bugs in the add consumer remote control command.
+
+- :mod:`~celery.contrib.migrate`: Can now filter and move tasks to specific
+  workers if :setting:`CELERY_WORKER_DIRECT` is enabled.
+
+    Among other improvements, the following functions have been added:
+
+        * ``move_direct(filterfun, **opts)``
+        * ``move_direct_by_id(task_id, worker_hostname, **opts)``
+        * ``move_direct_by_idmap({task_id: worker_hostname, ...}, **opts)``
+        * ``move_direct_by_taskmap({task_name: worker_hostname, ...}, **opts)``
+
+- :meth:`~celery.Celery.default_connection` now accepts a pool argument that
+  if set to false causes a new connection to be created instead of acquiring
+  one from the pool.
+
+- New signal: :signal:`celeryd_after_setup`.
+
+- Default loader now keeps lowercase attributes from the configuration module.
+
 .. _version-3.0.1:
 
 3.0.1

+ 2 - 0
celery/__compat__.py

@@ -14,6 +14,8 @@ from __future__ import absolute_import
 import operator
 import sys
 
+from functools import reduce
+from future_builtins import map
 from importlib import import_module
 from types import ModuleType
 

+ 3 - 1
celery/__init__.py

@@ -7,6 +7,8 @@
 
 from __future__ import absolute_import
 
+from future_builtins import map
+
 SERIES = 'DEVEL'
 VERSION = (3, 1, 0, 'a1')
 __version__ = '.'.join(map(str, VERSION[0:3])) + ''.join(VERSION[3:])
@@ -14,7 +16,7 @@ __author__ = 'Ask Solem'
 __contact__ = 'ask@celeryproject.org'
 __homepage__ = 'http://celeryproject.org'
 __docformat__ = 'restructuredtext'
-VERSION_BANNER = '%s (%s)' % (__version__, SERIES)
+VERSION_BANNER = '{0} ({1})'.format(__version__, SERIES)
 
 # -eof meta-
 

+ 0 - 1
celery/app/__init__.py

@@ -7,7 +7,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import os
 

+ 5 - 7
celery/app/amqp.py

@@ -25,9 +25,11 @@ from . import routes as _routes
 
 #: Human readable queue declaration.
 QUEUE_FORMAT = """
-. %(name)s exchange:%(exchange)s(%(exchange_type)s) binding:%(routing_key)s
+. {0.name:<16} exchange={0.exchange.name}({0.exchange.type}) \
+key={0.routing_key}
 """
 
+
 class Queues(dict):
     """Queue name⇒ declaration mapping.
 
@@ -101,12 +103,8 @@ class Queues(dict):
         active = self.consume_from
         if not active:
             return ''
-        info = [QUEUE_FORMAT.strip() % {
-                    'name': (name + ':').ljust(12),
-                    'exchange': q.exchange.name,
-                    'exchange_type': q.exchange.type,
-                    'routing_key': q.routing_key}
-                        for name, q in sorted(active.iteritems())]
+        info = [QUEUE_FORMAT.strip().format(q)
+                    for _, q in sorted(active.iteritems())]
         if indent_first:
             return textindent('\n'.join(info), indent)
         return info[0] + '\n' + textindent('\n'.join(info[1:]), indent)

+ 3 - 1
celery/app/annotations.py

@@ -12,6 +12,8 @@
 """
 from __future__ import absolute_import
 
+from future_builtins import filter
+
 from celery.utils.functional import firstmethod, mpromise
 from celery.utils.imports import instantiate
 
@@ -52,4 +54,4 @@ def prepare(annotations):
         return ()
     elif not isinstance(annotations, (list, tuple)):
         annotations = (annotations, )
-    return map(expand_annotation, annotations)
+    return [expand_annotation(a) for a in annotations]

+ 23 - 19
celery/app/base.py

@@ -7,14 +7,14 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import warnings
 
 from collections import deque
 from contextlib import contextmanager
 from copy import deepcopy
-from functools import wraps
+from functools import reduce, wraps
+from operator import attrgetter
 from threading import Lock
 
 from billiard.util import register_after_fork
@@ -175,8 +175,10 @@ class Celery(object):
         self.conf.update(self.loader.cmdline_config_parser(argv, namespace))
 
     def send_task(self, name, args=None, kwargs=None, countdown=None,
-            eta=None, task_id=None, publisher=None, connection=None,
-            result_cls=None, expires=None, queues=None, **options):
+            eta=None, task_id=None, producer=None, connection=None,
+            result_cls=None, expires=None, queues=None, publisher=None,
+            **options):
+        producer = producer or publisher  # XXX compat
         if self.conf.CELERY_ALWAYS_EAGER:  # pragma: no cover
             warnings.warn(AlwaysEagerIgnored(
                 'CELERY_ALWAYS_EAGER has no effect on send_task'))
@@ -186,16 +188,15 @@ class Celery(object):
         options.setdefault('compression',
                            self.conf.CELERY_MESSAGE_COMPRESSION)
         options = router.route(options, name, args, kwargs)
-        with self.default_producer(publisher) as producer:
+        with self.producer_or_acquire(producer) as producer:
             return result_cls(producer.publish_task(name, args, kwargs,
                         task_id=task_id,
                         countdown=countdown, eta=eta,
                         expires=expires, **options))
 
-    def connection(self, hostname=None, userid=None,
-            password=None, virtual_host=None, port=None, ssl=None,
-            insist=None, connect_timeout=None, transport=None,
-            transport_options=None, **kwargs):
+    def connection(self, hostname=None, userid=None, password=None,
+            virtual_host=None, port=None, ssl=None, connect_timeout=None,
+            transport=None, transport_options=None, heartbeat=None, **kwargs):
         conf = self.conf
         return self.amqp.Connection(
                     hostname or conf.BROKER_HOST,
@@ -204,16 +205,17 @@ class Celery(object):
                     virtual_host or conf.BROKER_VHOST,
                     port or conf.BROKER_PORT,
                     transport=transport or conf.BROKER_TRANSPORT,
-                    insist=self.either('BROKER_INSIST', insist),
                     ssl=self.either('BROKER_USE_SSL', ssl),
                     connect_timeout=self.either(
-                                'BROKER_CONNECTION_TIMEOUT', connect_timeout),
+                        'BROKER_CONNECTION_TIMEOUT', connect_timeout),
+                    heartbeat=heartbeat,
                     transport_options=dict(conf.BROKER_TRANSPORT_OPTIONS,
                                            **transport_options or {}))
     broker_connection = connection
 
     @contextmanager
-    def default_connection(self, connection=None, pool=True, *args, **kwargs):
+    def connection_or_acquire(self, connection=None, pool=True,
+            *args, **kwargs):
         if connection:
             yield connection
         else:
@@ -223,14 +225,16 @@ class Celery(object):
             else:
                 with self.connection() as connection:
                     yield connection
+    default_connection = connection_or_acquire  # XXX compat
 
     @contextmanager
-    def default_producer(self, producer=None):
+    def producer_or_acquire(self, producer=None):
         if producer:
             yield producer
         else:
             with self.amqp.producer_pool.acquire(block=True) as producer:
                 yield producer
+    default_producer = producer_or_acquire  # XXX compat
 
     def with_default_connection(self, fun):
         """With any function accepting a `connection`
@@ -242,14 +246,14 @@ class Celery(object):
 
         **Deprecated**
 
-        Use ``with app.default_connection(connection)`` instead.
+        Use ``with app.connection_or_acquire(connection)`` instead.
 
         """
         @wraps(fun)
         def _inner(*args, **kwargs):
             connection = kwargs.pop('connection', None)
-            with self.default_connection(connection) as c:
-                return fun(*args, **dict(kwargs, connection=c))
+            with self.connection_or_acquire(connection) as c:
+                return fun(*args, connection=c, **kwargs)
         return _inner
 
     def prepare_config(self, c):
@@ -336,11 +340,11 @@ class Celery(object):
         return type(name or Class.__name__, (Class, ), attrs)
 
     def _rgetattr(self, path):
-        return reduce(getattr, [self] + path.split('.'))
+        return attrgetter(path)(self)
 
     def __repr__(self):
-        return '<%s %s:0x%x>' % (self.__class__.__name__,
-                                 self.main or '__main__', id(self), )
+        return '<{0} {1}:0x{2:x}>'.format(
+            type(self).__name__, self.main or '__main__', id(self))
 
     def __reduce__(self):
         # Reduce only pickles the configuration changes,

+ 8 - 9
celery/app/builtins.py

@@ -8,9 +8,9 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from collections import deque
+from future_builtins import map, zip
 from itertools import starmap
 
 from celery._state import get_current_worker_task
@@ -73,7 +73,8 @@ def add_unlock_chord_task(app):
     @app.task(name='celery.chord_unlock', max_retries=None)
     def unlock_chord(group_id, callback, interval=1, propagate=False,
             max_retries=None, result=None):
-        result = _res.GroupResult(group_id, map(_res.AsyncResult, result))
+        AR = _res.AsyncResult
+        result = _res.GroupResult(group_id, [AR(r) for r in result])
         j = result.join_native if result.supports_native_join else result.join
         if result.ready():
             subtask(callback).delay(j(propagate=propagate))
@@ -134,7 +135,7 @@ def add_group_task(app):
             if self.request.is_eager or app.conf.CELERY_ALWAYS_EAGER:
                 return app.GroupResult(result.id,
                         [task.apply(group_id=group_id) for task in taskit])
-            with app.default_producer() as pub:
+            with app.producer_or_acquire() as pub:
                 [task.apply_async(group_id=group_id, publisher=pub,
                                   add_to_parent=False) for task in taskit]
             parent = get_current_worker_task()
@@ -156,9 +157,8 @@ def add_group_task(app):
                     tid = opts['task_id'] = uuid()
                 return task, self.AsyncResult(tid)
 
-            tasks, results = zip(*[prepare_member(task) for task in tasks])
-            return (tasks, self.app.GroupResult(group_id, results),
-                    group_id, args)
+            tasks, res = list(zip(*[prepare_member(task) for task in tasks]))
+            return (tasks, self.app.GroupResult(group_id, res), group_id, args)
 
         def apply_async(self, partial_args=(), kwargs={}, **options):
             if self.app.conf.CELERY_ALWAYS_EAGER:
@@ -271,7 +271,7 @@ def add_chord_task(app):
 
             # - convert back to group if serialized
             if not isinstance(header, group):
-                header = group(map(maybe_subtask, header))
+                header = group([maybe_subtask(t) for t in header])
             # - eager applies the group inline
             if eager:
                 return header.apply(args=partial_args, task_id=group_id)
@@ -286,8 +286,7 @@ def add_chord_task(app):
                                        propagate=propagate,
                                        result=results)
             # - call the header group, returning the GroupResult.
-            # XXX Python 2.5 doesn't allow kwargs after star-args.
-            return header(*partial_args, **{'task_id': group_id})
+            return header(*partial_args, task_id=group_id)
 
         def _prepare_member(self, task, body, group_id):
             opts = task.options

+ 3 - 3
celery/app/control.py

@@ -8,7 +8,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from kombu.pidbox import Mailbox
 from kombu.utils import cached_property
@@ -84,6 +83,7 @@ class Inspect(object):
     def conf(self):
         return self._request('dump_conf')
 
+
 class Control(object):
     Mailbox = Mailbox
 
@@ -104,7 +104,7 @@ class Control(object):
         :returns: the number of tasks discarded.
 
         """
-        with self.app.default_connection(connection) as conn:
+        with self.app.connection_or_acquire(connection) as conn:
             return self.app.amqp.TaskConsumer(conn).purge()
     discard_all = purge
 
@@ -254,7 +254,7 @@ class Control(object):
             received.
 
         """
-        with self.app.default_connection(connection) as conn:
+        with self.app.connection_or_acquire(connection) as conn:
             arguments = dict(arguments or {}, **extra_kwargs)
             return self.mailbox(conn)._broadcast(command, arguments,
                                                  destination, reply, timeout,

+ 4 - 11
celery/app/defaults.py

@@ -58,7 +58,8 @@ class Option(object):
         return self.typemap[self.type](value)
 
     def __repr__(self):
-        return '<Option: type->%s default->%r>' % (self.type, self.default)
+        return '<Option: type->{0} default->{1!r}>'.format(self.type,
+                                                           self.default)
 
 
 NAMESPACES = {
@@ -67,9 +68,8 @@ NAMESPACES = {
         'CONNECTION_TIMEOUT': Option(4, type='float'),
         'CONNECTION_RETRY': Option(True, type='bool'),
         'CONNECTION_MAX_RETRIES': Option(100, type='int'),
+        'HEARTBEAT': Option(3, type='int'),
         'POOL_LIMIT': Option(10, type='int'),
-        'INSIST': Option(False, type='bool',
-                         deprecate_by='2.4', remove_by='4.0'),
         'USE_SSL': Option(False, type='bool'),
         'TRANSPORT': Option(type='string'),
         'TRANSPORT_OPTIONS': Option({}, type='dict'),
@@ -90,11 +90,6 @@ NAMESPACES = {
     'CELERY': {
         'ACKS_LATE': Option(False, type='bool'),
         'ALWAYS_EAGER': Option(False, type='bool'),
-        'AMQP_TASK_RESULT_EXPIRES': Option(type='float',
-                deprecate_by='2.5', remove_by='4.0',
-                alt='CELERY_TASK_RESULT_EXPIRES'),
-        'AMQP_TASK_RESULT_CONNECTION_MAX': Option(1, type='int',
-                remove_by='2.5', alt='BROKER_POOL_LIMIT'),
         'ANNOTATIONS': Option(type='any'),
         'BROADCAST_QUEUE': Option('celeryctl'),
         'BROADCAST_EXCHANGE': Option('celeryctl'),
@@ -136,8 +131,6 @@ NAMESPACES = {
         'SEND_TASK_ERROR_EMAILS': Option(False, type='bool'),
         'SEND_TASK_SENT_EVENT': Option(False, type='bool'),
         'STORE_ERRORS_EVEN_IF_IGNORED': Option(False, type='bool'),
-        'TASK_ERROR_WHITELIST': Option((), type='tuple',
-            deprecate_by='2.5', remove_by='4.0'),
         'TASK_PUBLISH_RETRY': Option(True, type='bool'),
         'TASK_PUBLISH_RETRY_POLICY': Option({
                 'max_retries': 100,
@@ -230,7 +223,7 @@ def find_deprecated_settings(source):
     from celery.utils import warn_deprecated
     for name, opt in flatten(NAMESPACES):
         if (opt.deprecate_by or opt.remove_by) and getattr(source, name, None):
-            warn_deprecated(description='The %r setting' % (name, ),
+            warn_deprecated(description='The {0!r} setting'.format(name),
                             deprecation=opt.deprecate_by,
                             removal=opt.remove_by,
                             alternative=opt.alt)

+ 9 - 7
celery/app/log.py

@@ -16,12 +16,13 @@ import logging
 import os
 import sys
 
+from logging.handlers import WatchedFileHandler
+
 from kombu.log import NullHandler
 
 from celery import signals
 from celery._state import get_current_task
 from celery.utils import isatty
-from celery.utils.compat import WatchedFileHandler
 from celery.utils.log import (
     get_logger, mlevel,
     ColorFormatter, ensure_process_aware_logger,
@@ -94,12 +95,13 @@ class Logging(object):
             if self.app.conf.CELERYD_HIJACK_ROOT_LOGGER:
                 root.handlers = []
 
-            for logger in filter(None, (root, get_multiprocessing_logger())):
-                self.setup_handlers(logger, logfile, format,
-                                    colorize, **kwargs)
-                if loglevel:
-                    logger.setLevel(loglevel)
-                signals.after_setup_logger.send(sender=None, logger=logger,
+            for logger in root, get_multiprocessing_logger():
+                if logger is not None:
+                    self.setup_handlers(logger, logfile, format,
+                                        colorize, **kwargs)
+                    if loglevel:
+                        logger.setLevel(loglevel)
+                    signals.after_setup_logger.send(sender=None, logger=logger,
                                             loglevel=loglevel, logfile=logfile,
                                             format=format, colorize=colorize)
             # then setup the root task logger.

+ 2 - 2
celery/app/routes.py

@@ -64,7 +64,7 @@ class Router(object):
             except KeyError:
                 if not self.create_missing:
                     raise QueueNotFound(
-                        'Queue %r is not defined in CELERY_QUEUES' % queue)
+                        'Queue {0!r} missing from CELERY_QUEUES'.format(queue))
                 for key in 'exchange', 'routing_key':
                     if route.get(key) is None:
                         route[key] = queue
@@ -92,4 +92,4 @@ def prepare(routes):
         return ()
     if not isinstance(routes, (list, tuple)):
         routes = (routes, )
-    return map(expand_route, routes)
+    return [expand_route(route) for route in routes]

+ 7 - 9
celery/app/task.py

@@ -7,7 +7,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from celery import current_app
 from celery import states
@@ -68,7 +67,7 @@ class Context(object):
             return default
 
     def __repr__(self):
-        return '<Context: %r>' % (vars(self, ))
+        return '<Context: {0!r}>'.format(vars(self))
 
     @property
     def children(self):
@@ -121,9 +120,8 @@ class TaskType(type):
         return instance.__class__
 
     def __repr__(cls):
-        if cls._app:
-            return '<class %s of %s>' % (cls.__name__, cls._app, )
-        return '<unbound %s>' % (cls.__name__, )
+        return ('<class {0.__name__} of {0._app}>' if cls._app
+           else '<unbound {0.__name__}>').format(cls)
 
 
 class Task(object):
@@ -456,7 +454,7 @@ class Task(object):
 
         if connection:
             producer = app.amqp.TaskProducer(connection)
-        with app.default_producer(producer) as P:
+        with app.producer_or_acquire(producer) as P:
             evd = None
             if conf.CELERY_SEND_TASK_SENT_EVENT:
                 evd = app.events.Dispatcher(channel=P.channel,
@@ -513,7 +511,7 @@ class Task(object):
             ...     twitter = Twitter(oauth=auth)
             ...     try:
             ...         twitter.post_status_update(message)
-            ...     except twitter.FailWhale, exc:
+            ...     except twitter.FailWhale as exc:
             ...         # Retry in 5 minutes.
             ...         raise tweet.retry(countdown=60 * 5, exc=exc)
 
@@ -550,7 +548,7 @@ class Task(object):
             if exc:
                 maybe_reraise()
             raise self.MaxRetriesExceededError(
-                    """Can't retry %s[%s] args:%s kwargs:%s""" % (
+                    "Can't retry {0}[{1}] args:{2} kwargs:{3}".format(
                         self.name, options['task_id'], args, kwargs))
 
         # If task was executed eagerly using apply(),
@@ -768,7 +766,7 @@ class Task(object):
 
     def __repr__(self):
         """`repr(task)`"""
-        return '<@task: %s>' % (self.name, )
+        return '<@task: {0.name}>'.format(self)
 
     @property
     def request(self):

+ 23 - 23
celery/app/utils.py

@@ -20,13 +20,13 @@ from .defaults import find
 
 #: Format used to generate bugreport information.
 BUGREPORT_INFO = """
-software -> celery:%(celery_v)s kombu:%(kombu_v)s py:%(py_v)s
-            billiard:%(billiard_v)s %(driver_v)s
-platform -> system:%(system)s arch:%(arch)s imp:%(py_i)s
-loader   -> %(loader)s
-settings -> transport:%(transport)s results:%(results)s
+software -> celery:{celery_v} kombu:{kombu_v} py:{py_v}
+            billiard:{billiard_v} {driver_v}
+platform -> system:{system} arch:{arch} imp:{py_i}
+loader   -> {loader}
+settings -> transport:{transport} results:{results}
 
-%(human_settings)s
+{human_settings}
 """
 
 
@@ -85,12 +85,12 @@ class Settings(datastructures.ConfigurationView):
             False
 
         """
-        return self['_'.join(filter(None, parts))]
+        return self['_'.join(part for part in parts if part)]
 
     def humanize(self):
         """Returns a human readable string showing changes to the
         configuration."""
-        return '\n'.join('%s %s' % (key + ':', pretty(value, width=50))
+        return '\n'.join('{0}: {1}'.format(key, pretty(value, width=50))
                         for key, value in self.without_defaults().iteritems())
 
 
@@ -132,21 +132,21 @@ def bugreport(app):
 
     try:
         trans = app.connection().transport
-        driver_v = '%s:%s' % (trans.driver_name, trans.driver_version())
+        driver_v = '{0}:{1}'.format(trans.driver_name, trans.driver_version())
     except Exception:
         driver_v = ''
 
-    return BUGREPORT_INFO % {
-        'system': _platform.system(),
-        'arch': ', '.join(filter(None, _platform.architecture())),
-        'py_i': platforms.pyimplementation(),
-        'celery_v': celery.VERSION_BANNER,
-        'kombu_v': kombu.__version__,
-        'billiard_v': billiard.__version__,
-        'py_v': _platform.python_version(),
-        'driver_v': driver_v,
-        'transport': app.conf.BROKER_TRANSPORT or 'amqp',
-        'results': app.conf.CELERY_RESULT_BACKEND or 'disabled',
-        'human_settings': app.conf.humanize(),
-        'loader': qualname(app.loader.__class__),
-    }
+    return BUGREPORT_INFO.format(
+        system=_platform.system(),
+        arch=', '.join(x for x in _platform.architecture() if x),
+        py_i=platforms.pyimplementation(),
+        celery_v=celery.VERSION_BANNER,
+        kombu_v=kombu.__version__,
+        billiard_v=billiard.__version__,
+        py_v=_platform.python_version(),
+        driver_v=driver_v,
+        transport=app.conf.BROKER_TRANSPORT or 'amqp',
+        results=app.conf.CELERY_RESULT_BACKEND or 'disabled',
+        human_settings=app.conf.humanize(),
+        loader=qualname(app.loader.__class__),
+    )

+ 18 - 18
celery/apps/beat.py

@@ -24,12 +24,12 @@ from celery.utils.timeutils import humanize_seconds
 
 STARTUP_INFO_FMT = """
 Configuration ->
-    . broker -> %(conninfo)s
-    . loader -> %(loader)s
-    . scheduler -> %(scheduler)s
-%(scheduler_info)s
-    . logfile -> %(logfile)s@%(loglevel)s
-    . maxinterval -> %(hmax_interval)s (%(max_interval)ss)
+    . broker -> {conninfo}
+    . loader -> {loader}
+    . scheduler -> {scheduler}
+{scheduler_info}
+    . logfile -> {logfile}@{loglevel}
+    . maxinterval -> {hmax_interval} ({max_interval}s)
 """.strip()
 
 logger = get_logger('celery.beat')
@@ -62,7 +62,7 @@ class Beat(configurated):
 
     def run(self):
         print(str(self.colored.cyan(
-                    'celerybeat v%s is starting.' % VERSION_BANNER)))
+                    'celerybeat v{0} is starting.'.format(VERSION_BANNER))))
         self.init_loader()
         self.set_process_title()
         self.start_scheduler()
@@ -95,7 +95,7 @@ class Beat(configurated):
         try:
             self.install_sync_handler(beat)
             beat.start()
-        except Exception, exc:
+        except Exception as exc:
             logger.critical('celerybeat raised exception %s: %r',
                             exc.__class__, exc,
                             exc_info=True)
@@ -108,16 +108,16 @@ class Beat(configurated):
 
     def startup_info(self, beat):
         scheduler = beat.get_scheduler(lazy=True)
-        return STARTUP_INFO_FMT % {
-            'conninfo': self.app.connection().as_uri(),
-            'logfile': self.logfile or '[stderr]',
-            'loglevel': LOG_LEVELS[self.loglevel],
-            'loader': qualname(self.app.loader),
-            'scheduler': qualname(scheduler),
-            'scheduler_info': scheduler.info,
-            'hmax_interval': humanize_seconds(beat.max_interval),
-            'max_interval': beat.max_interval,
-        }
+        return STARTUP_INFO_FMT.format(
+            conninfo=self.app.connection().as_uri(),
+            logfile=self.logfile or '[stderr]',
+            loglevel=LOG_LEVELS[self.loglevel],
+            loader=qualname(self.app.loader),
+            scheduler=qualname(scheduler),
+            scheduler_info=scheduler.info,
+            hmax_interval=humanize_seconds(beat.max_interval),
+            max_interval=beat.max_interval,
+            )
 
     def set_process_title(self):
         arg_start = 'manage' in sys.argv[0] and 2 or 1

+ 38 - 37
celery/apps/worker.py

@@ -10,7 +10,7 @@
     platform tweaks, and so on.
 
 """
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
 import logging
 import os
@@ -44,13 +44,12 @@ logger = get_logger(__name__)
 
 def active_thread_count():
     from threading import enumerate
-    # must use .getName on Python 2.5
     return sum(1 for t in enumerate()
-        if not t.getName().startswith('Dummy-'))
+        if not t.name.startswith('Dummy-'))
 
 
 def safe_say(msg):
-    sys.__stderr__.write('\n%s\n' % msg)
+    print('\n{0}'.format(msg), file=sys.__stderr__)
 
 ARTLINES = [
     ' --------------',
@@ -68,25 +67,25 @@ ARTLINES = [
 ]
 
 BANNER = """\
-celery@%(hostname)s v%(version)s
+celery@{hostname} v{version}
 
 [Configuration]
-. broker:      %(conninfo)s
-. app:         %(app)s
-. concurrency: %(concurrency)s
-. events:      %(events)s
+. broker:      {conninfo}
+. app:         {app}
+. concurrency: {concurrency}
+. events:      {events}
 
 [Queues]
-%(queues)s
+{queues}
 """
 
 EXTRA_INFO_FMT = """
 [Tasks]
-%(tasks)s
+{tasks}
 """
 
 UNKNOWN_QUEUE = """\
-Trying to select queue subset of %r, but queue %s is not
+Trying to select queue subset of {0!r}, but queue {1} is not
 defined in the CELERY_QUEUES setting.
 
 If you want to automatically declare unknown queues you can
@@ -174,13 +173,14 @@ class Worker(configurated):
 
     def on_consumer_ready(self, consumer):
         signals.worker_ready.send(sender=consumer)
-        print('celery@%s has started.' % self.hostname)
+        print('celery@{0.hostname} has started.'.format(self))
 
     def init_queues(self):
         try:
             self.app.select_queues(self.use_queues)
-        except KeyError, exc:
-            raise ImproperlyConfigured(UNKNOWN_QUEUE % (self.use_queues, exc))
+        except KeyError as exc:
+            raise ImproperlyConfigured(
+                    UNKNOWN_QUEUE.format(self.use_queues, exc))
         if self.app.conf.CELERY_WORKER_DIRECT:
             self.app.amqp.queues.select_add(worker_direct(self.hostname))
 
@@ -190,49 +190,50 @@ class Worker(configurated):
 
     def purge_messages(self):
         count = self.app.control.purge()
-        print('purge: Erased %d %s from the queue.\n' % (
+        print('purge: Erased {0} {1} from the queue.\n'.format(
                 count, pluralize(count, 'message')))
 
     def tasklist(self, include_builtins=True):
         tasks = self.app.tasks.keys()
         if not include_builtins:
-            tasks = filter(lambda s: not s.startswith('celery.'), tasks)
-        return '\n'.join('  . %s' % task for task in sorted(tasks))
+            tasks = [t for t in tasks if not t.startswith('celery.')]
+        return '\n'.join('  . {0}'.format(task) for task in sorted(tasks))
 
     def extra_info(self):
         if self.loglevel <= logging.INFO:
             include_builtins = self.loglevel <= logging.DEBUG
             tasklist = self.tasklist(include_builtins=include_builtins)
-            return EXTRA_INFO_FMT % {'tasks': tasklist}
+            return EXTRA_INFO_FMT.format(tasks=tasklist)
 
     def startup_info(self):
         app = self.app
         concurrency = unicode(self.concurrency)
-        appr = '%s:0x%x' % (app.main or '__main__', id(app))
+        appr = '{0}:0x{1:x}'.format(app.main or '__main__', id(app))
         if not isinstance(app.loader, AppLoader):
             loader = qualname(app.loader)
             if loader.startswith('celery.loaders'):
                 loader = loader[14:]
-            appr += ' (%s)' % loader
+            appr += ' ({0})'.format(loader)
         if self.autoscale:
-            concurrency = '{min=%s, max=%s}' % tuple(self.autoscale)
+            max, min = self.autoscale
+            concurrency = '{{min={0}, max={1}}}'.format(min, max)
         pool = self.pool_cls
         if not isinstance(pool, basestring):
             pool = pool.__module__
-        concurrency += ' (%s)' % pool.split('.')[-1]
+        concurrency += ' ({0})'.format(pool.split('.')[-1])
         events = 'ON'
         if not self.send_events:
             events = 'OFF (enable -E to monitor this worker)'
 
-        banner = (BANNER % {
-            'app': appr,
-            'hostname': self.hostname,
-            'version': VERSION_BANNER,
-            'conninfo': self.app.connection().as_uri(),
-            'concurrency': concurrency,
-            'events': events,
-            'queues': app.amqp.queues.format(indent=0, indent_first=False),
-        }).splitlines()
+        banner = BANNER.format(
+            app=appr,
+            hostname=self.hostname,
+            version=VERSION_BANNER,
+            conninfo=self.app.connection().as_uri(),
+            concurrency=concurrency,
+            events=events,
+            queues=app.amqp.queues.format(indent=0, indent_first=False),
+        ).splitlines()
 
         # integrate the ASCII art.
         for i, x in enumerate(banner):
@@ -282,7 +283,7 @@ class Worker(configurated):
 
     def set_process_status(self, info):
         return platforms.set_mp_process_title('celeryd',
-                info='%s (%s)' % (info, platforms.strargv(sys.argv)),
+                info='{0} ({1})'.format(info, platforms.strargv(sys.argv)),
                 hostname=self.hostname)
 
 
@@ -296,7 +297,7 @@ def _shutdown_handler(worker, sig='TERM', how='Warm', exc=SystemExit,
             if current_process()._name == 'MainProcess':
                 if callback:
                     callback(worker)
-                safe_say('celeryd: %s shutdown (MainProcess)' % how)
+                safe_say('celeryd: {0} shutdown (MainProcess)'.format(how))
             if active_thread_count() > 1:
                 setattr(state, {'Warm': 'should_stop',
                                 'Cold': 'should_terminate'}[how], True)
@@ -327,7 +328,7 @@ def install_worker_restart_handler(worker, sig='SIGHUP'):
     def restart_worker_sig_handler(signum, frame):
         """Signal handler restarting the current python program."""
         set_in_sighandler(True)
-        safe_say('Restarting celeryd (%s)' % (' '.join(sys.argv), ))
+        safe_say('Restarting celeryd ({0})'.format(' '.join(sys.argv)))
         pid = os.fork()
         if pid == 0:
             os.execv(sys.executable, [sys.executable] + sys.argv)
@@ -373,8 +374,8 @@ def install_HUP_not_supported_handler(worker, sig='SIGHUP'):
     def warn_on_HUP_handler(signum, frame):
         set_in_sighandler(True)
         try:
-            safe_say('%(sig)s not supported: Restarting with %(sig)s is '
-                     'unstable on this platform!' % {'sig': sig})
+            safe_say('{sig} not supported: Restarting with {sig} is '
+                     'unstable on this platform!'.format(sig=sig))
         finally:
             set_in_sighandler(False)
     platforms.signals[sig] = warn_on_HUP_handler

+ 3 - 3
celery/backends/__init__.py

@@ -18,7 +18,7 @@ from celery.utils.imports import symbol_by_name
 from celery.utils.functional import memoize
 
 UNKNOWN_BACKEND = """\
-Unknown result backend: %r.  Did you spell that correctly? (%r)\
+Unknown result backend: {0!r}.  Did you spell that correctly? ({1!r})\
 """
 
 BACKEND_ALIASES = {
@@ -43,8 +43,8 @@ def get_backend_cls(backend=None, loader=None):
     aliases = dict(BACKEND_ALIASES, **loader.override_backends)
     try:
         return symbol_by_name(backend, aliases)
-    except ValueError, exc:
-        raise ValueError, ValueError(UNKNOWN_BACKEND % (
+    except ValueError as exc:
+        raise ValueError, ValueError(UNKNOWN_BACKEND.format(
                     backend, exc)), sys.exc_info()[2]
 
 

+ 2 - 11
celery/backends/amqp.py

@@ -9,7 +9,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import socket
 import threading
@@ -76,17 +75,9 @@ class AMQPBackend(BaseDictBackend):
         self.serializer = serializer or conf.CELERY_RESULT_SERIALIZER
         self.auto_delete = auto_delete
 
-        # AMQP_TASK_RESULT_EXPIRES setting is deprecated and will be
-        # removed in version 4.0.
-        dexpires = conf.CELERY_AMQP_TASK_RESULT_EXPIRES
-
         self.expires = None
-        if 'expires' in kwargs:
-            if kwargs['expires'] is not None:
-                self.expires = self.prepare_expires(kwargs['expires'])
-        else:
-            self.expires = self.prepare_expires(dexpires)
-
+        if 'expires' not in kwargs or kwargs['expires'] is not None:
+            self.expires = self.prepare_expires(kwargs.get('expires'))
         if self.expires:
             self.queue_arguments['x-expires'] = int(self.expires * 1000)
         self.mutex = threading.Lock()

+ 4 - 5
celery/backends/base.py

@@ -19,6 +19,7 @@ import time
 import sys
 
 from datetime import timedelta
+from future_builtins import map
 
 from kombu import serialization
 from kombu.utils.encoding import bytes_to_str, ensure_bytes, from_utf8
@@ -140,8 +141,7 @@ class BaseBackend(object):
         return result
 
     def forget(self, task_id):
-        raise NotImplementedError('%s does not implement forget.' % (
-                    self.__class__))
+        raise NotImplementedError('backend does not implement forget.')
 
     def wait_for(self, task_id, timeout=None, propagate=True, interval=0.5):
         """Wait for task and return its result.
@@ -259,8 +259,7 @@ class BaseDictBackend(BaseBackend):
         self._forget(task_id)
 
     def _forget(self, task_id):
-        raise NotImplementedError('%s does not implement forget.' % (
-                    self.__class__))
+        raise NotImplementedError('backend does not implement forget.')
 
     def get_status(self, task_id):
         """Get the status of a task."""
@@ -411,7 +410,7 @@ class KeyValueStoreBackend(BaseDictBackend):
             for key, value in r.iteritems():
                 yield bytes_to_str(key), value
             if timeout and iterations * interval >= timeout:
-                raise TimeoutError('Operation timed out (%s)' % (timeout, ))
+                raise TimeoutError('Operation timed out ({0})'.format(timeout))
             time.sleep(interval)  # don't busy loop.
             iterations += 0
 

+ 4 - 4
celery/backends/cache.py

@@ -91,9 +91,9 @@ class CacheBackend(KeyValueStoreBackend):
             self.Client = backends[self.backend]()
         except KeyError:
             raise ImproperlyConfigured(
-                    'Unknown cache backend: %s. Please use one of the '
-                    'following backends: %s' % (self.backend,
-                                                ', '.join(backends.keys())))
+                    'Unknown cache backend: {0}. Please use one of the '
+                    'following backends: {1}'.format(self.backend,
+                                        ', '.join(backends.keys())))
 
     def get(self, key):
         return self.client.get(key)
@@ -119,7 +119,7 @@ class CacheBackend(KeyValueStoreBackend):
 
     def __reduce__(self, args=(), kwargs={}):
         servers = ';'.join(self.servers)
-        backend = '%s://%s/' % (self.backend, servers)
+        backend = '{0}://{1}/'.format(self.backend, servers)
         kwargs.update(
             dict(backend=backend,
                  expires=self.expires,

+ 0 - 71
celery/backends/database/a805d4bd.py

@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    celery.backends.database.a805d4bd
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    This module fixes a bug with pickling and relative imports in Python < 2.6.
-
-    The problem is with pickling an e.g. `exceptions.KeyError` instance.
-    As SQLAlchemy has its own `exceptions` module, pickle will try to
-    lookup :exc:`KeyError` in the wrong module, resulting in this exception::
-
-        cPickle.PicklingError: Can't pickle <type 'exceptions.KeyError'>:
-            attribute lookup exceptions.KeyError failed
-
-    doing `import exceptions` just before the dump in `sqlalchemy.types`
-    reveals the source of the bug::
-
-        EXCEPTIONS: <module 'sqlalchemy.exc' from '/var/lib/hudson/jobs/celery/
-            workspace/buildenv/lib/python2.5/site-packages/sqlalchemy/exc.pyc'>
-
-    Hence the random module name 'a805d5bd' is taken to decrease the chances of
-    a collision.
-
-"""
-from __future__ import absolute_import
-
-from sqlalchemy.types import PickleType as _PickleType
-
-
-class PickleType(_PickleType):  # pragma: no cover
-
-    def bind_processor(self, dialect):
-        impl_processor = self.impl.bind_processor(dialect)
-        dumps = self.pickler.dumps
-        protocol = self.protocol
-        if impl_processor:
-
-            def process(value):
-                if value is not None:
-                    value = dumps(value, protocol)
-                return impl_processor(value)
-
-        else:
-
-            def process(value):  # noqa
-                if value is not None:
-                    value = dumps(value, protocol)
-                return value
-        return process
-
-    def result_processor(self, dialect, coltype):
-        impl_processor = self.impl.result_processor(dialect, coltype)
-        loads = self.pickler.loads
-        if impl_processor:
-
-            def process(value):
-                value = impl_processor(value)
-                if value is not None:
-                    return loads(value)
-        else:
-
-            def process(value):  # noqa
-                if value is not None:
-                    return loads(value)
-        return process
-
-    def copy_value(self, value):
-        if self.mutable:
-            return self.pickler.loads(self.pickler.dumps(value, self.protocol))
-        else:
-            return value

+ 0 - 50
celery/backends/database/dfd042c7.py

@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    celery.backends.database.dfd042c7
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    SQLAlchemy 0.5.8 version of :mod:`~celery.backends.database.a805d4bd`,
-    see the docstring of that module for an explanation of why we need
-    this workaround.
-
-"""
-from __future__ import absolute_import
-
-from sqlalchemy.types import PickleType as _PickleType
-from sqlalchemy import util
-
-
-class PickleType(_PickleType):  # pragma: no cover
-
-    def process_bind_param(self, value, dialect):
-        dumps = self.pickler.dumps
-        protocol = self.protocol
-        if value is not None:
-            return dumps(value, protocol)
-
-    def process_result_value(self, value, dialect):
-        loads = self.pickler.loads
-        if value is not None:
-            return loads(str(value))
-
-    def copy_value(self, value):
-        if self.mutable:
-            return self.pickler.loads(self.pickler.dumps(value, self.protocol))
-        else:
-            return value
-
-    def compare_values(self, x, y):
-        if self.comparator:
-            return self.comparator(x, y)
-        elif self.mutable and not hasattr(x, '__eq__') and x is not None:
-            util.warn_deprecated(
-                    'Objects stored with PickleType when mutable=True '
-                    'must implement __eq__() for reliable comparison.')
-            a = self.pickler.dumps(x, self.protocol)
-            b = self.pickler.dumps(y, self.protocol)
-            return a == b
-        else:
-            return x == y
-
-    def is_mutable(self):
-        return self.mutable

+ 3 - 8
celery/backends/database/models.py

@@ -11,17 +11,12 @@ from __future__ import absolute_import
 from datetime import datetime
 
 import sqlalchemy as sa
+from sqlalchemy.types import PickleType
 
 from celery import states
 
 from .session import ResultModelBase
 
-# See docstring of a805d4bd for an explanation for this workaround ;)
-if sa.__version__.startswith('0.5'):
-    from .dfd042c7 import PickleType
-else:
-    from .a805d4bd import PickleType  # noqa
-
 
 class Task(ResultModelBase):
     """Task result/status."""
@@ -49,7 +44,7 @@ class Task(ResultModelBase):
                 'date_done': self.date_done}
 
     def __repr__(self):
-        return '<Task %s state: %s>' % (self.task_id, self.status)
+        return '<Task {0.task_id} state: {0.status}>'.format(self)
 
 
 class TaskSet(ResultModelBase):
@@ -74,4 +69,4 @@ class TaskSet(ResultModelBase):
                 'date_done': self.date_done}
 
     def __repr__(self):
-        return '<TaskSet: %s>' % (self.taskset_id, )
+        return '<TaskSet: {0.taskset_id}>'.format(self)

+ 2 - 2
celery/backends/redis.py

@@ -58,9 +58,9 @@ class RedisBackend(KeyValueStoreBackend):
 
         # For compatibility with the old REDIS_* configuration keys.
         def _get(key):
-            for prefix in 'CELERY_REDIS_%s', 'REDIS_%s':
+            for prefix in 'CELERY_REDIS_{0}', 'REDIS_{0}':
                 try:
-                    return conf[prefix % key]
+                    return conf[prefix.format(key)]
                 except KeyError:
                     pass
         if host and '://' in host:

+ 13 - 13
celery/beat.py

@@ -15,6 +15,8 @@ import shelve
 import sys
 import traceback
 
+from threading import Event, Thread
+
 from billiard import Process, ensure_multiprocessing
 from kombu.utils import reprcall
 from kombu.utils.functional import maybe_promise
@@ -27,7 +29,6 @@ from .app import app_or_default
 from .schedules import maybe_schedule, crontab
 from .utils import cached_property
 from .utils.imports import instantiate
-from .utils.threads import Event, Thread
 from .utils.timeutils import humanize_seconds
 from .utils.log import get_logger
 
@@ -118,9 +119,8 @@ class ScheduleEntry(object):
         return vars(self).iteritems()
 
     def __repr__(self):
-        return ('<Entry: %s %s {%s}' % (self.name,
-                    reprcall(self.task, self.args or (), self.kwargs or {}),
-                    self.schedule))
+        return '<Entry: {0.name} {call} {0.schedule}'.format(self,
+            call=reprcall(self.task, self.args or (), self.kwargs or {}))
 
 
 class Scheduler(object):
@@ -174,7 +174,7 @@ class Scheduler(object):
             info('Scheduler: Sending due task %s', entry.task)
             try:
                 result = self.apply_async(entry, publisher=publisher)
-            except Exception, exc:
+            except Exception as exc:
                 error('Message Error: %s\n%s',
                       exc, traceback.format_stack(), exc_info=True)
             else:
@@ -203,7 +203,7 @@ class Scheduler(object):
                 (time.time() - self._last_sync) > self.sync_every)
 
     def reserve(self, entry):
-        new_entry = self.schedule[entry.name] = entry.next()
+        new_entry = self.schedule[entry.name] = next(entry)
         return new_entry
 
     def apply_async(self, entry, publisher=None, **kwargs):
@@ -222,10 +222,10 @@ class Scheduler(object):
                 result = self.send_task(entry.task, entry.args, entry.kwargs,
                                         publisher=publisher,
                                         **entry.options)
-        except Exception, exc:
+        except Exception as exc:
             raise SchedulingError, SchedulingError(
-                "Couldn't apply scheduled task %s: %s" % (
-                    entry.name, exc)), sys.exc_info()[2]
+                "Couldn't apply scheduled task {0.name}: {exc}".format(
+                    entry, exc)), sys.exc_info()[2]
         finally:
             if self.should_sync():
                 self._do_sync()
@@ -324,7 +324,7 @@ class PersistentScheduler(Scheduler):
         for suffix in self.known_suffixes:
             try:
                 os.remove(self.schedule_filename + suffix)
-            except OSError, exc:
+            except OSError as exc:
                 if exc.errno != errno.ENOENT:
                     raise
 
@@ -333,7 +333,7 @@ class PersistentScheduler(Scheduler):
             self._store = self.persistence.open(self.schedule_filename,
                                                 writeback=True)
             entries = self._store.setdefault('entries', {})
-        except Exception, exc:
+        except Exception as exc:
             error('Removing corrupted schedule file %r: %r',
                   self.schedule_filename, exc, exc_info=True)
             self._remove_db()
@@ -369,7 +369,7 @@ class PersistentScheduler(Scheduler):
 
     @property
     def info(self):
-        return '    . db -> %s' % (self.schedule_filename, )
+        return '    . db -> {self.schedule_filename}'.format(self=self)
 
 
 class Service(object):
@@ -472,7 +472,7 @@ def EmbeddedService(*args, **kwargs):
     """Return embedded clock service.
 
     :keyword thread: Run threaded instead of as a separate process.
-        Default is :const:`False`.
+        Uses :mod:`multiprocessing` by default, if available.
 
     """
     if kwargs.pop('thread', False) or _Process is None:

+ 12 - 10
celery/bin/base.py

@@ -56,7 +56,7 @@ Daemon Options
     Optional directory to change to after detaching.
 
 """
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
 import os
 import re
@@ -64,6 +64,7 @@ import sys
 import warnings
 
 from collections import defaultdict
+from future_builtins import zip
 from optparse import OptionParser, IndentedHelpFormatter, make_option as Option
 from types import ModuleType
 
@@ -78,7 +79,7 @@ for warning in (CDeprecationWarning, CPendingDeprecationWarning):
     warnings.simplefilter('once', warning, 0)
 
 ARGV_DISABLED = """
-Unrecognized command line arguments: %s
+Unrecognized command line arguments: {0}
 
 Try --help?
 """
@@ -91,7 +92,7 @@ class HelpFormatter(IndentedHelpFormatter):
 
     def format_epilog(self, epilog):
         if epilog:
-            return '\n%s\n\n' % epilog
+            return '\n{0}\n\n'.format(epilog)
         return ''
 
     def format_description(self, description):
@@ -202,7 +203,7 @@ class Command(object):
 
     def usage(self, command):
         """Returns the command-line usage string for this app."""
-        return '%%prog [options] %s' % (self.args, )
+        return '%prog [options] {0.args}'.format(self)
 
     def get_options(self):
         """Get supported command line options."""
@@ -232,21 +233,21 @@ class Command(object):
             options = dict((k, self.expanduser(v))
                             for k, v in vars(options).iteritems()
                                 if not k.startswith('_'))
-        args = map(self.expanduser, args)
+        args = [self.expanduser(arg) for arg in args]
         self.check_args(args)
         return options, args
 
     def check_args(self, args):
         if not self.supports_args and args:
-            self.die(ARGV_DISABLED % (', '.join(args, )), EX_USAGE)
+            self.die(ARGV_DISABLED.format(', '.join(args)), EX_USAGE)
 
     def die(self, msg, status=EX_FAILURE):
-        sys.stderr.write(msg + '\n')
+        print(msg, file=sys.stderr)
         sys.exit(status)
 
     def early_version(self, argv):
         if '--version' in argv:
-            sys.stdout.write('%s\n' % self.version)
+            print(self.version)
             sys.exit(0)
 
     def parse_options(self, prog_name, arguments):
@@ -272,7 +273,7 @@ class Command(object):
             for long_opt, help in doc.iteritems():
                 option = parser.get_option(long_opt)
                 if option is not None:
-                    option.help = ' '.join(help) % {'default': option.default}
+                    option.help = ' '.join(help).format(default=option.default)
         return parser
 
     def prepare_preload_options(self, options):
@@ -311,7 +312,8 @@ class Command(object):
         sym = self.symbol_by_name(app)
         if isinstance(sym, ModuleType):
             if getattr(sym, '__path__', None):
-                return self.find_app('%s.celery:' % (app.replace(':', ''), ))
+                return self.find_app('{0}.celery:'.format(
+                            app.replace(':', '')))
             return sym.celery
         return sym
 

+ 18 - 19
celery/bin/camqadm.py

@@ -5,13 +5,14 @@ The :program:`celery amqp` command.
 .. program:: celery amqp
 
 """
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
 import cmd
 import sys
 import shlex
 import pprint
 
+from functools import partial
 from itertools import count
 
 from amqplib import client_0_8 as amqp
@@ -35,9 +36,7 @@ Example:
     -> queue.delete myqueue yes no
 """
 
-
-def say(m, fh=sys.stderr):
-    fh.write('%s\n' % (m, ))
+say = partial(print, file=sys.stderr)
 
 
 class Spec(object):
@@ -100,11 +99,11 @@ class Spec(object):
             return response
         if callable(self.returns):
             return self.returns(response)
-        return self.returns % (response, )
+        return self.returns.format(response)
 
     def format_arg(self, name, type, default_value=None):
         if default_value is not None:
-            return '%s:%s' % (name, default_value)
+            return '{0}:{1}'.format(name, default_value)
         return name
 
     def format_signature(self):
@@ -121,7 +120,7 @@ def dump_message(message):
 
 
 def format_declare_queue(ret):
-    return 'ok. queue:%s messages:%s consumers:%s.' % ret
+    return 'ok. queue:{0} messages:{1} consumers:{2}.'.format(*ret)
 
 
 class AMQShell(cmd.Cmd):
@@ -145,7 +144,7 @@ class AMQShell(cmd.Cmd):
     """
     conn = None
     chan = None
-    prompt_fmt = '%d> '
+    prompt_fmt = '{self.counter}> '
     identchars = cmd.IDENTCHARS = '.'
     needs_reconnect = False
     counter = 1
@@ -176,9 +175,9 @@ class AMQShell(cmd.Cmd):
         'queue.delete': Spec(('queue', str),
                              ('if_unused', bool, 'no'),
                              ('if_empty', bool, 'no'),
-                             returns='ok. %d messages deleted.'),
+                             returns='ok. {0} messages deleted.'),
         'queue.purge': Spec(('queue', str),
-                            returns='ok. %d messages deleted.'),
+                            returns='ok. {0} messages deleted.'),
         'basic.get': Spec(('queue', str),
                           ('no_ack', bool, 'off'),
                           returns=dump_message),
@@ -200,10 +199,10 @@ class AMQShell(cmd.Cmd):
     def note(self, m):
         """Say something to the user. Disabled if :attr:`silent`."""
         if not self.silent:
-            say(m, fh=self.out)
+            say(m, file=self.out)
 
     def say(self, m):
-        say(m, fh=self.out)
+        say(m, file=self.out)
 
     def get_amqp_api_command(self, cmd, arglist):
         """With a command name and a list of arguments, convert the arguments
@@ -234,7 +233,7 @@ class AMQShell(cmd.Cmd):
 
     def display_command_help(self, cmd, short=False):
         spec = self.amqp[cmd]
-        self.say('%s %s' % (cmd, spec.format_signature()))
+        self.say('{0} {1}'.format(cmd, spec.format_signature()))
 
     def do_help(self, *args):
         if not args:
@@ -246,7 +245,7 @@ class AMQShell(cmd.Cmd):
             self.display_command_help(args[0])
 
     def default(self, line):
-        self.say("unknown syntax: '%s'. how about some 'help'?" % line)
+        self.say("unknown syntax: {0!r}. how about some 'help'?".format(line))
 
     def get_names(self):
         return set(self.builtins) | set(self.amqp)
@@ -304,9 +303,9 @@ class AMQShell(cmd.Cmd):
             self.counter = self.inc_counter()
             try:
                 self.respond(self.dispatch(cmd, arg))
-            except (AttributeError, KeyError), exc:
+            except (AttributeError, KeyError) as exc:
                 self.default(line)
-            except Exception, exc:
+            except Exception as exc:
                 self.say(exc)
                 self.needs_reconnect = True
 
@@ -326,7 +325,7 @@ class AMQShell(cmd.Cmd):
 
     @property
     def prompt(self):
-        return self.prompt_fmt % self.counter
+        return self.prompt_fmt.format(self=self)
 
 
 class AMQPAdmin(object):
@@ -343,7 +342,7 @@ class AMQPAdmin(object):
         if conn:
             conn.close()
         conn = self.app.connection()
-        self.note('-> connecting to %s.' % conn.as_uri())
+        self.note('-> connecting to {0}.'.format(conn.as_uri()))
         conn.connect()
         self.note('-> connected.')
         return conn
@@ -360,7 +359,7 @@ class AMQPAdmin(object):
 
     def note(self, m):
         if not self.silent:
-            say(m, fh=self.out)
+            say(m, file=self.out)
 
 
 class AMQPAdminCommand(Command):

+ 76 - 72
celery/bin/celery.py

@@ -6,14 +6,14 @@ The :program:`celery` umbrella command.
 .. program:: celery
 
 """
-from __future__ import absolute_import
-from __future__ import with_statement
+from __future__ import absolute_import, print_function
 
 import anyjson
 import sys
 import warnings
 
 from billiard import freeze_support
+from future_builtins import map
 from importlib import import_module
 from pprint import pformat
 
@@ -29,10 +29,15 @@ from celery.bin.base import Command as BaseCommand, Option
 HELP = """
 ---- -- - - ---- Commands- -------------- --- ------------
 
-%(commands)s
+{commands}
 ---- -- - - --------- -- - -------------- --- ------------
 
-Type '%(prog_name)s <command> --help' for help using a specific command.
+Type '{prog_name} <command> --help' for help using a specific command.
+"""
+
+MIGRATE_PROGRESS_FMT = """\
+Migrating task {state.count}/{state.strtotal}: \
+{body[task]}[{body[id]}]\
 """
 
 commands = {}
@@ -62,10 +67,13 @@ class Error(Exception):
         return self.reason
 
 
-def command(fun, name=None, sortpri=0):
-    commands[name or fun.__name__] = fun
-    fun.sortpri = sortpri
-    return fun
+def command(*args, **kwargs):
+
+    def _register(fun):
+        commands[kwargs.get('name') or fun.__name__] = fun
+        return fun
+
+    return _register(args[0]) if args else _register
 
 
 def load_extension_commands(namespace='celery.commands'):
@@ -79,11 +87,13 @@ def load_extension_commands(namespace='celery.commands'):
         sym = ':'.join([ep.module_name, ep.attrs[0]])
         try:
             cls = symbol_by_name(sym)
-        except (ImportError, SyntaxError), exc:
-            warnings.warn('Cannot load extension %r: %r' % (sym, exc))
+        except (ImportError, SyntaxError) as exc:
+            warnings.warn(
+                'Cannot load extension {0!r}: {1!r}'.format(sym, exc))
         else:
             command(cls, name=ep.name)
 
+
 class Command(BaseCommand):
     help = ''
     args = ''
@@ -109,8 +119,8 @@ class Command(BaseCommand):
     def __call__(self, *args, **kwargs):
         try:
             ret = self.run(*args, **kwargs)
-        except Error, exc:
-            self.error(self.colored.red('Error: %s' % exc))
+        except Error as exc:
+            self.error(self.colored.red('Error: {0!r}'.format(exc)))
             return exc.status
 
         return ret if ret is not None else EX_OK
@@ -123,10 +133,7 @@ class Command(BaseCommand):
         self.out(s, fh=self.stderr)
 
     def out(self, s, fh=None):
-        s = str(s)
-        if not s.endswith('\n'):
-            s += '\n'
-        (fh or self.stdout).write(s)
+        print(s, file=fh or self.stdout)
 
     def run_from_argv(self, prog_name, argv):
         self.prog_name = prog_name
@@ -141,13 +148,13 @@ class Command(BaseCommand):
         return self(*args, **options)
 
     def usage(self, command):
-        return '%%prog %s [options] %s' % (command, self.args)
+        return '%prog {0} [options] {self.args}'.format(command, self=self)
 
     def prettify_list(self, n):
         c = self.colored
         if not n:
             return '- empty -'
-        return '\n'.join(str(c.reset(c.white('*'), ' %s' % (item, )))
+        return '\n'.join(str(c.reset(c.white('*'), ' {0}'.format(item)))
                             for item in n)
 
     def prettify_dict_ok_error(self, n):
@@ -213,6 +220,7 @@ class Delegate(Command):
         return self.target.run(*args, **kwargs)
 
 
+@command
 class multi(Command):
     """Start multiple worker instances."""
 
@@ -222,9 +230,9 @@ class multi(Command):
     def run_from_argv(self, prog_name, argv):
         from celery.bin.celeryd_multi import MultiTool
         return MultiTool().execute_from_commandline(argv, prog_name)
-multi = command(multi)
 
 
+@command
 class worker(Delegate):
     """Start worker instance.
 
@@ -239,9 +247,9 @@ class worker(Delegate):
         celery worker --autoscale=10,0
     """
     Command = 'celery.bin.celeryd:WorkerCommand'
-worker = command(worker, sortpri=01)
 
 
+@command
 class events(Delegate):
     """Event-stream utilities.
 
@@ -262,9 +270,9 @@ class events(Delegate):
         celery events -C mod.attr -F 1.0 --detach --maxrate=100/m -l info
     """
     Command = 'celery.bin.celeryev:EvCommand'
-events = command(events, sortpri=10)
 
 
+@command
 class beat(Delegate):
     """Start the celerybeat periodic task scheduler.
 
@@ -276,9 +284,9 @@ class beat(Delegate):
 
     """
     Command = 'celery.bin.celerybeat:BeatCommand'
-beat = command(beat, sortpri=20)
 
 
+@command
 class amqp(Delegate):
     """AMQP Administration Shell.
 
@@ -297,9 +305,9 @@ class amqp(Delegate):
 
     """
     Command = 'celery.bin.camqadm:AMQPAdminCommand'
-amqp = command(amqp, sortpri=30)
 
 
+@command(name='list')
 class list_(Command):
     """Get info from broker.
 
@@ -317,8 +325,7 @@ class list_(Command):
         except NotImplementedError:
             raise Error('Your transport cannot list bindings.')
 
-        fmt = lambda q, e, r: self.out('%s %s %s' % (q.ljust(28),
-                                                     e.ljust(28), r))
+        fmt = lambda q, e, r: self.out('{0:<28} {1:<28} {2}'.format(q, e, r))
         fmt('Queue', 'Exchange', 'Routing Key')
         fmt('-' * 16, '-' * 16, '-' * 16)
         for b in bindings:
@@ -328,16 +335,16 @@ class list_(Command):
         topics = {'bindings': self.list_bindings}
         available = ', '.join(topics.keys())
         if not what:
-            raise Error('You must specify what to list (%s)' % available)
+            raise Error('You must specify one of {0}'.format(available))
         if what not in topics:
-            raise Error('unknown topic %r (choose one of: %s)' % (
+            raise Error('unknown topic {0!r} (choose one of: {1})'.format(
                             what, available))
         with self.app.connection() as conn:
             self.app.amqp.TaskConsumer(conn).declare()
             topics[what](conn.manager)
-list_ = command(list_, 'list')
 
 
+@command
 class call(Command):
     """Call a task by name.
 
@@ -391,28 +398,29 @@ class call(Command):
                                  eta=maybe_iso8601(kw.get('eta')),
                                  expires=expires)
         self.out(res.id)
-call = command(call)
 
 
+@command
 class purge(Command):
     """Erase all messages from all known task queues.
 
     WARNING: There is no undo operation for this command.
 
     """
+    fmt_purged = 'Purged {mnum} {messages} from {qnum} known task {queues}.'
+    fmt_empty = 'No messages purged from {qnum} {queues}'
+
     def run(self, *args, **kwargs):
         queues = len(self.app.amqp.queues.keys())
-        messages_removed = self.app.control.purge()
-        if messages_removed:
-            self.out('Purged %s %s from %s known task %s.' % (
-                messages_removed, text.pluralize(messages_removed, 'message'),
-                queues, text.pluralize(queues, 'queue')))
-        else:
-            self.out('No messages purged from %s known %s' % (
-                queues, text.pluralize(queues, 'queue')))
-purge = command(purge)
+        messages = self.app.control.purge()
+        fmt = self.fmt_purged if messages else self.fmt_empty
+        self.out(fmt.format(
+            mnum=messages, qnum=queues,
+            messages=text.pluralize(messages, 'message'),
+            queues=text.pluralize(queues, 'queue')))
 
 
+@command
 class result(Command):
     """Gives the return value for a given task id.
 
@@ -443,7 +451,6 @@ class result(Command):
         else:
             value = result.get()
         self.out(self.prettify(value)[1])
-result = command(result)
 
 
 class _RemoteControl(Command):
@@ -467,8 +474,8 @@ class _RemoteControl(Command):
             # see if it uses args.
             meth = getattr(self, command)
             return text.join([
-                '|' + text.indent('%s%s %s' % (prefix, color(command),
-                                               meth.__doc__), indent), help,
+                '|' + text.indent('{0}{1} {2}'.format(prefix, color(command),
+                                                meth.__doc__), indent), help,
             ])
 
         except AttributeError:
@@ -491,7 +498,7 @@ class _RemoteControl(Command):
         ])
 
     def usage(self, command):
-        return '%%prog %s [options] %s <command> [arg1 .. argN]' % (
+        return '%%prog {0} [options] {1} <command> [arg1 .. argN]'.format(
                 command, self.args)
 
     def call(self, *args, **kwargs):
@@ -499,30 +506,29 @@ class _RemoteControl(Command):
 
     def run(self, *args, **kwargs):
         if not args:
-            raise Error('Missing %s method. See --help' % self.name)
+            raise Error('Missing {0.name} method. See --help'.format(self))
         return self.do_call_method(args, **kwargs)
 
     def do_call_method(self, args, **kwargs):
         method = args[0]
         if method == 'help':
-            raise Error("Did you mean '%s --help'?" % self.name)
+            raise Error("Did you mean '{0.name} --help'?".format(self))
         if method not in self.choices:
-            raise Error('Unknown %s method %s' % (self.name, method))
+            raise Error('Unknown {0.name} method {1}'.format(self, method))
 
         destination = kwargs.get('destination')
         timeout = kwargs.get('timeout') or self.choices[method][0]
         if destination and isinstance(destination, basestring):
-            destination = map(str.strip, destination.split(','))
+            destination = list(map(str.strip, destination.split(',')))
 
         try:
             handler = getattr(self, method)
         except AttributeError:
             handler = self.call
 
-        # XXX Python 2.5 does not support X(*args, foo=1)
-        kwargs = {"timeout": timeout, "destination": destination,
-                  "callback": self.say_remote_command_reply}
-        replies = handler(method, *args[1:], **kwargs)
+        replies = handler(method, *args[1:], timeout=timeout,
+                          destination=destination,
+                          callback=self.say_remote_command_reply)
         if not replies:
             raise Error('No nodes replied within time constraint.',
                         status=EX_UNAVAILABLE)
@@ -538,6 +544,7 @@ class _RemoteControl(Command):
             self.out(body)
 
 
+@command
 class inspect(_RemoteControl):
     """Inspect the worker at runtime.
 
@@ -566,9 +573,9 @@ class inspect(_RemoteControl):
     def call(self, method, *args, **options):
         i = self.app.control.inspect(**options)
         return getattr(i, method)(*args)
-inspect = command(inspect)
 
 
+@command
 class control(_RemoteControl):
     """Workers remote control.
 
@@ -602,9 +609,7 @@ class control(_RemoteControl):
     }
 
     def call(self, method, *args, **options):
-        # XXX Python 2.5 doesn't support X(*args, reply=True, **kwargs)
-        return getattr(self.app.control, method)(
-                *args, **dict(options, retry=True))
+        return getattr(self.app.control, method)(*args, retry=True, **options)
 
     def pool_grow(self, method, n=1, **kwargs):
         """[N=1]"""
@@ -635,9 +640,9 @@ class control(_RemoteControl):
     def cancel_consumer(self, method, queue, **kwargs):
         """<queue>"""
         return self.call(method, queue, reply=True, **kwargs)
-control = command(control)
 
 
+@command
 class status(Command):
     """Show list of workers that are online."""
     option_list = inspect.option_list
@@ -653,11 +658,11 @@ class status(Command):
                         status=EX_UNAVAILABLE)
         nodecount = len(replies)
         if not kwargs.get('quiet', False):
-            self.out('\n%s %s online.' % (nodecount,
-                                          text.pluralize(nodecount, 'node')))
-status = command(status)
+            self.out('\n{0} {1} online.'.format(
+                nodecount, text.pluralize(nodecount, 'node')))
 
 
+@command
 class migrate(Command):
     """Migrate tasks from one broker to another.
 
@@ -684,10 +689,10 @@ class migrate(Command):
             Option('--forever', '-F', action='store_true',
                     help='Continually migrate tasks until killed.'),
     )
+    progress_fmt = MIGRATE_PROGRESS_FMT
 
     def on_migrate_task(self, state, body, message):
-        self.out('Migrating task %s/%s: %s[%s]' % (
-            state.count, state.strtotal, body['task'], body['id']))
+        self.out(self.progress_fmt.format(state=state, body=body))
 
     def run(self, *args, **kwargs):
         if len(args) != 2:
@@ -699,9 +704,9 @@ class migrate(Command):
                       Connection(args[1]),
                       callback=self.on_migrate_task,
                       **kwargs)
-migrate = command(migrate)
 
 
+@command
 class shell(Command):  # pragma: no cover
     """Start shell session with convenient access to celery symbols.
 
@@ -811,31 +816,29 @@ class shell(Command):  # pragma: no cover
         import bpython
         bpython.embed(self.locals)
 
-shell = command(shell)
-
 
+@command
 class help(Command):
     """Show help screen and exit."""
 
     def usage(self, command):
-        return '%%prog <command> [options] %s' % (self.args, )
+        return '%%prog <command> [options] {0.args}'.format(self)
 
     def run(self, *args, **kwargs):
         self.parser.print_help()
-        self.out(HELP % {'prog_name': self.prog_name,
-                         'commands': CeleryCommand.list_commands()})
+        self.out(HELP.format(prog_name=self.prog_name,
+                             commands=CeleryCommand.list_commands()))
 
         return EX_USAGE
-help = command(help)
 
 
+@command
 class report(Command):
     """Shows information useful to include in bugreports."""
 
     def run(self, *args, **kwargs):
         self.out(self.app.bugreport())
         return EX_OK
-report = command(report)
 
 
 class CeleryCommand(BaseCommand):
@@ -888,12 +891,13 @@ class CeleryCommand(BaseCommand):
     def get_command_info(self, command, indent=0, color=None):
         colored = term.colored().names[color] if color else lambda x: x
         obj = self.commands[command]
+        cmd = 'celery {0}'.format(colored(command))
         if obj.leaf:
-            return '|' + text.indent('celery %s' % colored(command), indent)
+            return '|' + text.indent(cmd, indent)
         return text.join([
             ' ',
-            '|' + text.indent('celery %s --help' % colored(command), indent),
-            obj.list_commands(indent, 'celery %s' % command, colored),
+            '|' + text.indent('{0} --help'.format(cmd), indent),
+            obj.list_commands(indent, 'celery {0}'.format(command), colored),
         ])
 
     @classmethod
@@ -902,7 +906,7 @@ class CeleryCommand(BaseCommand):
         ret = []
         for cls, commands, color in command_classes:
             ret.extend([
-                text.indent('+ %s: ' % white(cls), indent),
+                text.indent('+ {0}: '.format(white(cls)), indent),
                 '\n'.join(self.get_command_info(command, indent + 4, color)
                             for command in commands),
                 ''

+ 1 - 2
celery/bin/celerybeat.py

@@ -17,7 +17,7 @@ The :program:`celery beat` command.
 
     Path to the schedule database. Defaults to `celerybeat-schedule`.
     The extension '.db' may be appended to the filename.
-    Default is %(default)s.
+    Default is {default}.
 
 .. cmdoption:: -S, --scheduler
 
@@ -38,7 +38,6 @@ The :program:`celery beat` command.
     `ERROR`, `CRITICAL`, or `FATAL`.
 
 """
-from __future__ import with_statement
 from __future__ import absolute_import
 
 import os

+ 2 - 2
celery/bin/celeryd.py

@@ -62,7 +62,7 @@ The :program:`celery worker` command (previously known as ``celeryd``)
 .. cmdoption:: -S, --statedb
 
     Path to the state database. The extension '.db' may
-    be appended to the filename. Default: %(default)s
+    be appended to the filename. Default: {default}
 
 .. cmdoption:: -E, --events
 
@@ -149,7 +149,7 @@ class WorkerCommand(Command):
             try:
                 kwargs['loglevel'] = mlevel(loglevel)
             except KeyError:  # pragma: no cover
-                self.die('Unknown level %r. Please use one of %s.' % (
+                self.die('Unknown level {0!r}. Please use one of {1}.'.format(
                     loglevel, '|'.join(l for l in LOG_LEVELS.keys()
                       if isinstance(l, basestring))))
         return self.app.Worker(**kwargs).run()

+ 5 - 6
celery/bin/celeryd_detach.py

@@ -11,7 +11,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import celery
 import os
@@ -73,9 +72,9 @@ class PartialOptionParser(OptionParser):
                 nargs = option.nargs
                 if len(rargs) < nargs:
                     if nargs == 1:
-                        self.error('%s option requires an argument' % opt)
+                        self.error('{0} requires an argument'.format(opt))
                     else:
-                        self.error('%s option requires %d arguments' % (
+                        self.error('{0} requires {1} arguments'.format(
                                     opt, nargs))
                 elif nargs == 1:
                     value = rargs.pop(0)
@@ -84,7 +83,7 @@ class PartialOptionParser(OptionParser):
                     del rargs[0:nargs]
 
             elif had_explicit_value:
-                self.error('%s option does not take a value' % opt)
+                self.error('{0} option does not take a value'.format(opt))
             else:
                 value = None
             option.process(opt, value, values, self)
@@ -122,9 +121,9 @@ class detached_celeryd(object):
         parser = self.Parser(prog_name)
         options, values = parser.parse_args(argv)
         if options.logfile:
-            parser.leftovers.append('--logfile=%s' % (options.logfile, ))
+            parser.leftovers.append('--logfile={0}'.format(options.logfile))
         if options.pidfile:
-            parser.leftovers.append('--pidfile=%s' % (options.pidfile, ))
+            parser.leftovers.append('--pidfile={0}'.format(options.pidfile))
         return options, values, parser.leftovers
 
     def execute_from_commandline(self, argv=None):

+ 33 - 33
celery/bin/celeryd_multi.py

@@ -88,7 +88,7 @@ Examples
     celeryd -n xuzzy.myhost -c 3
 
 """
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
 import errno
 import os
@@ -97,6 +97,7 @@ import socket
 import sys
 
 from collections import defaultdict
+from future_builtins import map
 from subprocess import Popen
 from time import sleep
 
@@ -113,16 +114,16 @@ SIGNAMES = set(sig for sig in dir(signal)
 SIGMAP = dict((getattr(signal, name), name) for name in SIGNAMES)
 
 USAGE = """\
-usage: %(prog_name)s start <node1 node2 nodeN|range> [celeryd options]
-       %(prog_name)s stop <n1 n2 nN|range> [-SIG (default: -TERM)]
-       %(prog_name)s restart <n1 n2 nN|range> [-SIG] [celeryd options]
-       %(prog_name)s kill <n1 n2 nN|range>
+usage: {prog_name} start <node1 node2 nodeN|range> [celeryd options]
+       {prog_name} stop <n1 n2 nN|range> [-SIG (default: -TERM)]
+       {prog_name} restart <n1 n2 nN|range> [-SIG] [celeryd options]
+       {prog_name} kill <n1 n2 nN|range>
 
-       %(prog_name)s show <n1 n2 nN|range> [celeryd options]
-       %(prog_name)s get hostname <n1 n2 nN|range> [-qv] [celeryd options]
-       %(prog_name)s names <n1 n2 nN|range>
-       %(prog_name)s expand template <n1 n2 nN|range>
-       %(prog_name)s help
+       {prog_name} show <n1 n2 nN|range> [celeryd options]
+       {prog_name} get hostname <n1 n2 nN|range> [-qv] [celeryd options]
+       {prog_name} names <n1 n2 nN|range>
+       {prog_name} expand template <n1 n2 nN|range>
+       {prog_name} help
 
 additional options (must appear after command name):
 
@@ -182,12 +183,12 @@ class MultiTool(object):
         try:
             self.commands[argv[0]](argv[1:], cmd)
         except KeyError:
-            self.error('Invalid command: %s' % argv[0])
+            self.error('Invalid command: {0}'.format(argv[0]))
 
         return self.retcode
 
     def say(self, m, newline=True):
-        self.fh.write('%s%s' % (m, '\n' if newline else ''))
+        print(m, file=self.fh, end='\n' if newline else '')
 
     def names(self, argv, cmd):
         p = NamespacedOptionParser(argv)
@@ -215,7 +216,7 @@ class MultiTool(object):
         retcodes = []
         self.note('> Starting nodes...')
         for nodename, argv, _ in multi_args(p, cmd):
-            self.note('\t> %s: ' % (nodename, ), newline=False)
+            self.note('\t> {0}: '.format(nodename), newline=False)
             retcode = self.waitexec(argv)
             self.note(retcode and self.FAILED or self.OK)
             retcodes.append(retcode)
@@ -229,10 +230,10 @@ class MultiTool(object):
     def signal_node(self, nodename, pid, sig):
         try:
             os.kill(pid, sig)
-        except OSError, exc:
+        except OSError as exc:
             if exc.errno != errno.ESRCH:
                 raise
-            self.note('Could not signal %s (%s): No such process' % (
+            self.note('Could not signal {0} ({1}): No such process'.format(
                         nodename, pid))
             return False
         return True
@@ -240,7 +241,7 @@ class MultiTool(object):
     def node_alive(self, pid):
         try:
             os.kill(pid, 0)
-        except OSError, exc:
+        except OSError as exc:
             if exc.errno == errno.ESRCH:
                 return False
             raise
@@ -261,16 +262,15 @@ class MultiTool(object):
         for node in list(P):
             if node in P:
                 nodename, _, pid = node
-                self.note('\t> %s: %s -> %s' % (nodename,
-                                                SIGMAP[sig][3:],
-                                                pid))
+                self.note('\t> {0}: {1} -> {2}'.format(
+                    nodename, SIGMAP[sig][3:], pid))
                 if not self.signal_node(nodename, pid, sig):
                     on_down(node)
 
         def note_waiting():
             left = len(P)
             if left:
-                self.note(self.colored.blue('> Waiting for %s %s...' % (
+                self.note(self.colored.blue('> Waiting for {0} {1}...'.format(
                     left, pluralize(left, 'node'))), newline=False)
 
         if retry:
@@ -282,7 +282,7 @@ class MultiTool(object):
                     self.note('.', newline=False)
                     nodename, _, pid = node
                     if not self.node_alive(pid):
-                        self.note('\n\t> %s: %s' % (nodename, self.OK))
+                        self.note('\n\t> {0}: {1}'.format(nodename, self.OK))
                         on_down(node)
                         note_waiting()
                         break
@@ -304,7 +304,7 @@ class MultiTool(object):
             if pid:
                 nodes.append((nodename, tuple(argv), pid))
             else:
-                self.note('> %s: %s' % (nodename, self.DOWN))
+                self.note('> {0}: {1}'.format(nodename, self.DOWN))
                 if callback:
                     callback(nodename, argv, pid)
 
@@ -314,7 +314,7 @@ class MultiTool(object):
         self.splash()
         p = NamespacedOptionParser(argv)
         for nodename, _, pid in self.getpids(p, cmd):
-            self.note('Killing node %s (%s)' % (nodename, pid))
+            self.note('Killing node {0} ({1})'.format(nodename, pid))
             self.signal_node(nodename, pid, signal.SIGKILL)
 
     def stop(self, argv, cmd, retry=None, callback=None):
@@ -337,7 +337,7 @@ class MultiTool(object):
 
         def on_node_shutdown(nodename, argv, pid):
             self.note(self.colored.blue(
-                '> Restarting node %s: ' % nodename), newline=False)
+                '> Restarting node {0}: '.format(nodename)), newline=False)
             retval = self.waitexec(argv)
             self.note(retval and self.FAILED or self.OK)
             retvals.append(retval)
@@ -362,24 +362,24 @@ class MultiTool(object):
 
     def usage(self):
         self.splash()
-        self.say(USAGE % {'prog_name': self.prog_name})
+        self.say(USAGE.format(prog_name=self.prog_name))
 
     def splash(self):
         if not self.nosplash:
             c = self.colored
-            self.note(c.cyan('celeryd-multi v%s' % VERSION_BANNER))
+            self.note(c.cyan('celeryd-multi v{0}'.format(VERSION_BANNER)))
 
     def waitexec(self, argv, path=sys.executable):
         args = ' '.join([path] + list(argv))
         argstr = shellsplit(from_utf8(args))
         pipe = Popen(argstr, env=self.env)
-        self.info('  %s' % ' '.join(argstr))
+        self.info('  {0}'.format(' '.join(argstr)))
         retcode = pipe.wait()
         if retcode < 0:
-            self.note('* Child was terminated by signal %s' % (-retcode, ))
+            self.note('* Child was terminated by signal {0}'.format(-retcode))
             return -retcode
         elif retcode > 0:
-            self.note('* Child terminated with failure code %s' % (retcode, ))
+            self.note('* Child terminated with errorcode {0}'.format(retcode))
         return retcode
 
     def error(self, msg=None):
@@ -425,7 +425,7 @@ def multi_args(p, cmd='celeryd', append='', prefix='', suffix=''):
         except ValueError:
             pass
         else:
-            names = map(str, range(1, noderange + 1))
+            names = list(map(str, range(1, noderange + 1)))
             prefix = 'celery'
     cmd = options.pop('--cmd', cmd)
     append = options.pop('--append', append)
@@ -517,8 +517,8 @@ def format_opt(opt, value):
     if not value:
         return opt
     if opt.startswith('--'):
-        return '%s=%s' % (opt, value)
-    return '%s %s' % (opt, value)
+        return '{0}={1}'.format(opt, value)
+    return '{0} {1}'.format(opt, value)
 
 
 def parse_ns_range(ns, ranges=False):
@@ -526,7 +526,7 @@ def parse_ns_range(ns, ranges=False):
     for space in ',' in ns and ns.split(',') or [ns]:
         if ranges and '-' in space:
             start, stop = space.split('-')
-            x = map(str, range(int(start), int(stop) + 1))
+            x = list(map(str, range(int(start), int(stop) + 1)))
             ret.extend(x)
         else:
             ret.append(space)

+ 2 - 3
celery/bin/celeryev.py

@@ -36,7 +36,6 @@ The :program:`celery events` command.
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import os
 import sys
@@ -100,8 +99,8 @@ class EvCommand(Command):
             return cam()
 
     def set_process_status(self, prog, info=''):
-        prog = '%s:%s' % (self.prog_name, prog)
-        info = '%s %s' % (info, strargv(sys.argv))
+        prog = '{0}:{1}'.format(self.prog_name, prog)
+        info = '{0} {1}'.format(info, strargv(sys.argv))
         return set_process_title(prog, info=info)
 
     def get_options(self):

+ 8 - 6
celery/canvas.py

@@ -11,6 +11,7 @@
 """
 from __future__ import absolute_import
 
+from future_builtins import map
 from operator import itemgetter
 from itertools import chain as _chain
 
@@ -19,7 +20,6 @@ from kombu.utils import fxrange, kwdict, reprcall
 from celery import current_app
 from celery.local import Proxy
 from celery.utils import cached_property, uuid
-from celery.utils.compat import chain_from_iterable
 from celery.utils.functional import (
     maybe_list, is_list, regen,
     chunks as _chunks,
@@ -161,7 +161,7 @@ class Signature(dict):
         return self.append_to_list_option('link_error', errback)
 
     def flatten_links(self):
-        return list(chain_from_iterable(_chain([[self]],
+        return list(_chain.from_iterable(_chain([[self]],
                 (link.flatten_links()
                     for link in maybe_list(self.options.get('link')) or []))))
 
@@ -245,7 +245,8 @@ class xmap(_basemap):
 
     def __repr__(self):
         task, it = self._unpack_args(self.kwargs)
-        return '[%s(x) for x in %s]' % (task.task, truncate(repr(it), 100))
+        return '[{0}(x) for x in {1}]'.format(task.task,
+                                              truncate(repr(it), 100))
 Signature.register_type(xmap)
 
 
@@ -254,7 +255,8 @@ class xstarmap(_basemap):
 
     def __repr__(self):
         task, it = self._unpack_args(self.kwargs)
-        return '[%s(*x) for x in %s]' % (task.task, truncate(repr(it), 100))
+        return '[{0}(*x) for x in {1}]'.format(task.task,
+                                               truncate(repr(it), 100))
 Signature.register_type(xstarmap)
 
 
@@ -310,7 +312,7 @@ class group(Signature):
 
     def __call__(self, *partial_args, **options):
         tasks, result, gid, args = self.type.prepare(options,
-                    map(Signature.clone, self.tasks), partial_args)
+                    [Signature.clone(t) for t in self.tasks], partial_args)
         return self.type(tasks, result, gid, args)
 
     def skew(self, start=1.0, stop=None, step=1.0):
@@ -371,7 +373,7 @@ class chord(Signature):
     def __repr__(self):
         if self.body:
             return self.body.reprcall(self.tasks)
-        return '<chord without body: %r>' % (self.tasks, )
+        return '<chord without body: {0.tasks!r}>'.format(self)
 
     @property
     def tasks(self):

+ 2 - 2
celery/concurrency/base.py

@@ -91,11 +91,11 @@ class BasePool(object):
 
     def terminate_job(self, pid):
         raise NotImplementedError(
-                '%s does not implement kill_job' % (self.__class__, ))
+                '{0} does not implement kill_job'.format(type(self)))
 
     def restart(self):
         raise NotImplementedError(
-                '%s does not implement restart' % (self.__class__, ))
+                '{0} does not implement restart'.format(type(self)))
 
     def stop(self):
         self.on_stop()

+ 3 - 3
celery/contrib/batches.py

@@ -21,7 +21,7 @@ A click counter that flushes the buffer every 100 messages, and every
         from collections import Counter
         count = Counter(request.kwargs['url'] for request in requests)
         for url, count in count.items():
-            print('>>> Clicks: %s -> %s' % (url, count))
+            print('>>> Clicks: {0} -> {1}'.format(url, count))
 
 Registering the click is done as follows:
 
@@ -79,7 +79,7 @@ def apply_batches_task(task, args, loglevel, logfile):
     task.push_request(loglevel=loglevel, logfile=logfile)
     try:
         result = task(*args)
-    except Exception, exc:
+    except Exception as exc:
         result = None
         logger.error('Error: %r', exc, exc_info=True)
     finally:
@@ -139,7 +139,7 @@ class Batches(Task):
         self._logging = None
 
     def run(self, requests):
-        raise NotImplementedError('%r must implement run(requests)' % (self, ))
+        raise NotImplementedError('must implement run(requests)')
 
     def flush(self, requests):
         return self.apply_buffer(requests, ([SimpleRequest.from_request(r)

+ 15 - 11
celery/contrib/migrate.py

@@ -6,12 +6,11 @@
     Migration tools.
 
 """
-from __future__ import absolute_import
-from __future__ import with_statement
+from __future__ import absolute_import, print_function
 
 import socket
 
-from functools import partial, wraps
+from functools import partial
 from itertools import cycle, islice
 
 from kombu import eventloop, Queue
@@ -23,6 +22,12 @@ from celery.app import app_or_default
 from celery.utils import worker_direct
 
 
+MOVING_PROGRESS_FMT = """\
+Moving task {state.filtered}/{state.strtotal}: \
+{body[task]}[{body[id]}]\
+"""
+
+
 class StopFiltering(Exception):
     pass
 
@@ -40,8 +45,8 @@ class State(object):
 
     def __repr__(self):
         if self.filtered:
-            return '^%s' % self.filtered
-        return '%s/%s' % (self.count, self.strtotal)
+            return '^{0.filtered}'.format(self)
+        return '{0.count}/{0.strtotal}'.format(self)
 
 
 def republish(producer, message, exchange=None, routing_key=None,
@@ -177,7 +182,7 @@ def move(predicate, connection=None, exchange=None, routing_key=None,
     """
     app = app_or_default(app)
     queues = [_maybe_queue(app, queue) for queue in source or []] or None
-    with app.default_connection(connection, pool=False) as conn:
+    with app.connection_or_acquire(connection, pool=False) as conn:
         producer = app.amqp.TaskProducer(conn)
         state = State()
 
@@ -345,12 +350,11 @@ def move_by_taskmap(map, **kwargs):
     return move(task_name_in_map, **kwargs)
 
 
+def filter_status(state, body, message, **kwargs):
+    print(MOVING_PROGRESS_FMT.format(state=state, body=body, **kwargs))
+
+
 move_direct = partial(move, transform=worker_direct)
 move_direct_by_id = partial(move_task_by_id, transform=worker_direct)
 move_direct_by_idmap = partial(move_by_idmap, transform=worker_direct)
 move_direct_by_taskmap = partial(move_by_taskmap, transform=worker_direct)
-
-
-def filter_status(state, body, message):
-    print('Moving task %s/%s: %s[%s]' % (
-            state.filtered, state.strtotal, body['task'], body['id']))

+ 30 - 15
celery/contrib/rdb.py

@@ -34,13 +34,14 @@ Inspired by http://snippets.dzone.com/posts/show/7248
     base port.  The selected port will be logged by the worker.
 
 """
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
 import errno
 import os
 import socket
 import sys
 
+from future_builtins import map
 from pdb import Pdb
 
 from billiard import current_process
@@ -55,6 +56,23 @@ _current = [None]
 
 _frame = getattr(sys, '_getframe')
 
+NO_AVAILABLE_PORT = """\
+{self.ident}: Couldn't find an available port.
+
+Please specify one using the CELERY_RDB_PORT environment variable.
+"""
+
+BANNER = """\
+{self.ident}: Please telnet into {self.host} {self.port}.
+
+Type `exit` in session to continue.
+
+{self.ident}: Waiting for client...
+"""
+
+SESSION_STARTED = '{self.ident}: Now in session with {self.remote_addr}.'
+SESSION_ENDED = '{self.ident}: Session with {self.remote_addr} ended.'
+
 
 class Rdb(Pdb):
     me = 'Remote Debugger'
@@ -71,15 +89,14 @@ class Rdb(Pdb):
         self._sock, this_port = self.get_avail_port(host, port,
             port_search_limit, port_skew)
         self._sock.listen(1)
-        me = '%s:%s' % (self.me, this_port)
-        context = self.context = {'me': me, 'host': host, 'port': this_port}
-        self.say('%(me)s: Please telnet %(host)s %(port)s.'
-                 '  Type `exit` in session to continue.' % context)
-        self.say('%(me)s: Waiting for client...' % context)
+        self.ident = '{0}:{1}'.format(self.me, this_port)
+        self.host = host
+        self.port = this_port
+        self.say(BANNER.format(self=self))
 
         self._client, address = self._sock.accept()
-        context['remote_addr'] = ':'.join(map(str, address))
-        self.say('%(me)s: In session with %(remote_addr)s' % context)
+        self.remote_addr = ':'.join(map(str, address))
+        self.say(SESSION_STARTED.format(self=self))
         self._handle = sys.stdin = sys.stdout = self._client.makefile('rw')
         Pdb.__init__(self, completekey='tab',
                            stdin=self._handle, stdout=self._handle)
@@ -96,19 +113,17 @@ class Rdb(Pdb):
             this_port = port + skew + i
             try:
                 _sock.bind((host, this_port))
-            except socket.error, exc:
+            except socket.error as exc:
                 if exc.errno in [errno.EADDRINUSE, errno.EINVAL]:
                     continue
                 raise
             else:
                 return _sock, this_port
         else:
-            raise Exception(
-                '%s: Could not find available port. Please set using '
-                'environment variable CELERY_RDB_PORT' % (self.me, ))
+            raise Exception(NO_AVAILABLE_PORT.format(self=self))
 
     def say(self, m):
-        self.out.write(m + '\n')
+        print(m, file=self.out)
 
     def _close_session(self):
         self.stdin, self.stdout = sys.stdin, sys.stdout = self._prev_handles
@@ -116,7 +131,7 @@ class Rdb(Pdb):
         self._client.close()
         self._sock.close()
         self.active = False
-        self.say('%(me)s: Session %(remote_addr)s ended.' % self.context)
+        self.say(SESSION_ENDED.format(self=self))
 
     def do_continue(self, arg):
         self._close_session()
@@ -135,7 +150,7 @@ class Rdb(Pdb):
             frame = _frame().f_back
         try:
             Pdb.set_trace(self, frame)
-        except socket.error, exc:
+        except socket.error as exc:
             # connection reset by peer.
             if exc.errno != errno.ECONNRESET:
                 raise

+ 13 - 11
celery/datastructures.py

@@ -6,13 +6,13 @@
     Custom types and data structures.
 
 """
-from __future__ import absolute_import
-from __future__ import with_statement
+from __future__ import absolute_import, print_function
 
 import sys
 import time
 
 from collections import defaultdict
+from functools import partial
 from itertools import chain
 
 from billiard.einfo import ExceptionInfo  # noqa
@@ -164,13 +164,14 @@ class DependencyGraph(object):
         :param fh: A file, or a file-like object to write the graph to.
 
         """
-        fh.write('digraph dependencies {\n')
+        P = partial(print, file=fh)
+        P('digraph dependencies {')
         for obj, adjacent in self.iteritems():
             if not adjacent:
-                fh.write(ws + '"%s"\n' % (obj, ))
+                P(ws + '"{0}"'.format(obj))
             for req in adjacent:
-                fh.write(ws + '"%s" -> "%s"\n' % (obj, req))
-        fh.write('}\n')
+                P(ws + '"{0}" -> "{1}"'.format(obj, req))
+        P('}')
 
     def __iter__(self):
         return self.adjacent.iterkeys()
@@ -191,11 +192,11 @@ class DependencyGraph(object):
     def __repr__(self):
         return '\n'.join(self.repr_node(N) for N in self)
 
-    def repr_node(self, obj, level=1):
-        output = ['%s(%s)' % (obj, self.valency_of(obj))]
+    def repr_node(self, obj, level=1, fmt='{0}({1})'):
+        output = [fmt.format(obj, self.valency_of(obj))]
         if obj in self:
             for other in self[obj]:
-                d = '%s(%s)' % (other, self.valency_of(other))
+                d = fmt.format(other, self.valency_of(other))
                 output.append('     ' * level + d)
                 output.extend(self.repr_node(other, level + 1).split('\n')[1:])
         return '\n'.join(output)
@@ -214,7 +215,8 @@ class AttributeDictMixin(object):
             return self[k]
         except KeyError:
             raise AttributeError(
-                "'%s' object has no attribute '%s'" % (type(self).__name__, k))
+                "{0!r} object has no attribute {1!r}".format(
+                    type(self).__name__, k))
 
     def __setattr__(self, key, value):
         """`d[key] = value -> d.key = value`"""
@@ -433,7 +435,7 @@ class LimitedSet(object):
         return iter(self._data)
 
     def __repr__(self):
-        return 'LimitedSet(%r)' % (self._data.keys(), )
+        return 'LimitedSet({0!r})'.format(self._data.keys())
 
     @property
     def chronologically(self):

+ 13 - 21
celery/events/__init__.py

@@ -9,7 +9,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import time
 import socket
@@ -20,6 +19,7 @@ from contextlib import contextmanager
 from copy import copy
 
 from kombu import eventloop, Exchange, Queue, Consumer, Producer
+from kombu.mixins import ConsumerMixin
 from kombu.utils import cached_property
 
 from celery.app import app_or_default
@@ -132,7 +132,7 @@ class EventDispatcher(object):
                 try:
                     self.publisher.publish(event,
                                            routing_key=type.replace('-', '.'))
-                except Exception, exc:
+                except Exception as exc:
                     if not self.buffer_while_offline:
                         raise
                     self._outbound_buffer.append((type, fields, exc))
@@ -154,7 +154,7 @@ class EventDispatcher(object):
         self.publisher = None
 
 
-class EventReceiver(object):
+class EventReceiver(ConsumerMixin):
     """Capture events.
 
     :param connection: Connection to the broker.
@@ -191,21 +191,17 @@ class EventReceiver(object):
         handler = self.handlers.get(type) or self.handlers.get('*')
         handler and handler(event)
 
-    @contextmanager
-    def consumer(self, wakeup=True):
-        """Create event consumer."""
-        consumer = Consumer(self.connection,
-                            queues=[self.queue], no_ack=True)
-        consumer.register_callback(self._receive)
-        with consumer:
-            if wakeup:
-                self.wakeup_workers(channel=consumer.channel)
-            yield consumer
+    def get_consumers(self, Consumer, channel):
+        return [Consumer(queues=[self.queue],
+                         callbacks=[self._receive], no_ack=True)]
+
+    def on_consume_ready(self, connection, channel, consumers,
+            wakeup=True, **kwargs):
+        if wakeup:
+            self.wakeup_workers(channel=channel)
 
     def itercapture(self, limit=None, timeout=None, wakeup=True):
-        with self.consumer(wakeup=wakeup) as consumer:
-            yield consumer
-            self.drain_events(limit=limit, timeout=timeout)
+        return self.consume(limit=limit, timeout=timeout, wakeup=wakeup)
 
     def capture(self, limit=None, timeout=None, wakeup=True):
         """Open up a consumer capturing events.
@@ -214,17 +210,13 @@ class EventReceiver(object):
         stop unless forced via :exc:`KeyboardInterrupt` or :exc:`SystemExit`.
 
         """
-        list(self.itercapture(limit=limit, timeout=timeout, wakeup=wakeup))
+        return list(self.consume(limit=limit, timeout=timeout, wakeup=wakeup))
 
     def wakeup_workers(self, channel=None):
         self.app.control.broadcast('heartbeat',
                                    connection=self.connection,
                                    channel=channel)
 
-    def drain_events(self, **kwargs):
-        for _ in eventloop(self.connection, **kwargs):
-            pass
-
     def _receive(self, body, message):
         type = body.pop('type').lower()
         clock = body.get('clock')

+ 23 - 20
celery/events/cursesmon.py

@@ -6,8 +6,7 @@
     Graphical monitor of Celery events using curses.
 
 """
-from __future__ import absolute_import
-from __future__ import with_statement
+from __future__ import absolute_import, print_function
 
 import curses
 import sys
@@ -35,6 +34,10 @@ MIN_TASK_WIDTH = 16
 # this module is considered experimental
 # we don't care about coverage.
 
+STATUS_SCREEN = """\
+events: {s.event_count} tasks:{s.task_count} workers:{w_alive}/{w_all}
+"""
+
 
 class CursesMonitor(object):  # pragma: no cover
     keymap = {}
@@ -49,7 +52,7 @@ class CursesMonitor(object):  # pragma: no cover
     online_str = 'Workers online: '
     help_title = 'Keys: '
     help = ('j:up k:down i:info t:traceback r:result c:revoke ^c: quit')
-    greet = 'celeryev %s' % VERSION_BANNER
+    greet = 'celeryev {0}'.format(VERSION_BANNER)
     info_str = 'Info: '
 
     def __init__(self, state, keymap=None, app=None):
@@ -87,7 +90,8 @@ class CursesMonitor(object):  # pragma: no cover
         state = abbr(state, STATE_WIDTH).ljust(STATE_WIDTH)
         timestamp = timestamp.ljust(TIMESTAMP_WIDTH)
 
-        row = '%s %s %s %s %s ' % (uuid, worker, task, timestamp, state)
+        row = '{0} {1} {2} {3} {4} '.format(uuid, worker, task,
+                                            timestamp, state)
         if self.screen_width is None:
             self.screen_width = len(row[:mx])
         return row[:mx]
@@ -201,7 +205,7 @@ class CursesMonitor(object):  # pragma: no cover
                 curline = y()
 
                 host, response = subreply.items()[0]
-                host = '%s: ' % host
+                host = '{0}: '.format(host)
                 self.win.addstr(curline, 3, host, curses.A_BOLD)
                 attr = curses.A_NORMAL
                 text = ''
@@ -275,7 +279,7 @@ class CursesMonitor(object):  # pragma: no cover
                                 curses.A_NORMAL)
 
         return self.alert(alert_callback,
-                'Task details for %s' % self.selected_task)
+                'Task details for {0.selected_task}'.format(self))
 
     def selection_traceback(self):
         if not self.selected_task:
@@ -290,7 +294,7 @@ class CursesMonitor(object):  # pragma: no cover
                 self.win.addstr(y(), 3, line)
 
         return self.alert(alert_callback,
-                'Task Exception Traceback for %s' % self.selected_task)
+                'Task Exception Traceback for {0.selected_task}'.format(self))
 
     def selection_result(self):
         if not self.selected_task:
@@ -305,7 +309,7 @@ class CursesMonitor(object):  # pragma: no cover
                 self.win.addstr(y(), 3, line)
 
         return self.alert(alert_callback,
-                'Task Result for %s' % self.selected_task)
+                'Task Result for {0.selected_task}'.format(self))
 
     def display_task_row(self, lineno, task):
         state_color = self.state_colors.get(task.state)
@@ -366,10 +370,10 @@ class CursesMonitor(object):  # pragma: no cover
             else:
                 info = selection.info()
                 if 'runtime' in info:
-                    info['runtime'] = '%.2fs' % info['runtime']
+                    info['runtime'] = '{0:.2f}s'.format(info['runtime'])
                 if 'result' in info:
                     info['result'] = abbr(info['result'], 16)
-                info = ' '.join('%s=%s' % (key, value)
+                info = ' '.join('{0}={1}'.format(key, value)
                             for key, value in info.items())
                 detail = '... -> key i'
             infowin = abbr(info,
@@ -395,11 +399,10 @@ class CursesMonitor(object):  # pragma: no cover
         # Info
         win.addstr(my - 3, x, self.info_str, curses.A_BOLD)
         win.addstr(my - 3, x + len(self.info_str),
-                'events:%s tasks:%s workers:%s/%s' % (
-                    self.state.event_count, self.state.task_count,
-                    len([w for w in self.state.workers.values()
-                            if w.alive]),
-                    len(self.state.workers)),
+                STATUS_SCREEN.format(s=self.state,
+                    w_alive=len([w for w in self.state.workers.values()
+                                    if w.alive]),
+                    w_all=len(self.state.workers)),
                 curses.A_DIM)
 
         # Help
@@ -474,11 +477,11 @@ class DisplayThread(threading.Thread):  # pragma: no cover
 def capture_events(app, state, display):  # pragma: no cover
 
     def on_connection_error(exc, interval):
-        sys.stderr.write('Connection Error: %r. Retry in %ss.' % (
-            exc, interval))
+        print('Connection Error: {0!r}. Retry in {1}s.'.format(
+                exc, interval), file=sys.stderr)
 
     while 1:
-        sys.stderr.write('-> evtop: starting capture...\n')
+        print('-> evtop: starting capture...', file=sys.stderr)
         with app.connection() as conn:
             try:
                 conn.ensure_connection(on_connection_error,
@@ -488,8 +491,8 @@ def capture_events(app, state, display):  # pragma: no cover
                 display.init_screen()
                recv.capture()
-            except (conn.connection_errors, conn.channel_errors), exc:
-                sys.stderr.write('Connection lost: %r' % (exc, ))
+            except conn.connection_errors + conn.channel_errors as exc:
+                print('Connection lost: {0!r}'.format(exc), file=sys.stderr)
 
 
 def evtop(app=None):  # pragma: no cover

+ 9 - 12
celery/events/dumper.py

@@ -7,7 +7,7 @@
     as they happen.  Think of it like a `tcpdump` for Celery events.
 
 """
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
 import sys
 
@@ -31,17 +31,13 @@ def humanize_type(type):
         return type.lower().replace('-', ' ')
 
 
-def say(msg, out=sys.stdout):
-    out.write(msg + '\n')
-
-
 class Dumper(object):
 
     def __init__(self, out=sys.stdout):
         self.out = out
 
     def say(self, msg):
-        say(msg, out=self.out)
+        print(msg, file=self.out)
 
     def on_event(self, event):
         timestamp = datetime.utcfromtimestamp(event.pop('timestamp'))
@@ -50,7 +46,8 @@ class Dumper(object):
         if type.startswith('task-'):
             uuid = event.pop('uuid')
             if type in ('task-received', 'task-sent'):
-                task = TASK_NAMES[uuid] = '%s(%s) args=%s kwargs=%s' % (
+                task = TASK_NAMES[uuid] = '{0}({1}) args={2} kwargs={3}' \
+                    .format(
                         event.pop('name'), uuid,
                         event.pop('args'),
                         event.pop('kwargs'))
@@ -58,17 +55,17 @@ class Dumper(object):
                 task = TASK_NAMES.get(uuid, '')
             return self.format_task_event(hostname, timestamp,
                                           type, task, event)
-        fields = ', '.join('%s=%s' % (key, event[key])
+        fields = ', '.join('{0}={1}'.format(key, event[key])
                         for key in sorted(event.keys()))
         sep = fields and ':' or ''
-        self.say('%s [%s] %s%s %s' % (hostname, timestamp,
-                                      humanize_type(type), sep, fields))
+        self.say('{0} [{1}] {2}{3} {4}'.format(hostname, timestamp,
+                                            humanize_type(type), sep, fields))
 
     def format_task_event(self, hostname, timestamp, type, task, event):
-        fields = ', '.join('%s=%s' % (key, event[key])
+        fields = ', '.join('{0}={1}'.format(key, event[key])
                         for key in sorted(event.keys()))
         sep = fields and ':' or ''
-        self.say('%s [%s] %s%s %s %s' % (hostname, timestamp,
+        self.say('{0} [{1}] {2}{3} {4} {5}'.format(hostname, timestamp,
                     humanize_type(type), sep, task, fields))
 
 

+ 2 - 3
celery/events/snapshot.py

@@ -94,9 +94,8 @@ def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
 
     app.log.setup_logging_subsystem(loglevel, logfile)
 
-    logger.info(
-        '-> evcam: Taking snapshots with %s (every %s secs.)\n' % (
-            camera, freq))
+    print('-> evcam: Taking snapshots with {0} (every {1} secs.)'.format(
+                camera, freq))
     state = app.events.State()
     cam = instantiate(camera, state, app=app, freq=freq,
                       maxrate=maxrate, timer=timer)

+ 10 - 8
celery/events/state.py

@@ -17,7 +17,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import heapq
 
@@ -76,8 +75,11 @@ class Worker(Element):
                 self.heartbeats = self.heartbeats[self.heartbeat_max:]
 
     def __repr__(self):
-        return '<Worker: %s (%s)' % (self.hostname,
-                                     self.alive and 'ONLINE' or 'OFFLINE')
+        return '<Worker: {0.hostname} ({0.status_string})'.format(self)
+
+    @property
+    def status_string(self):
+        return 'ONLINE' if self.alive else 'OFFLINE'
 
     @property
     def heartbeat_expires(self):
@@ -197,7 +199,7 @@ class Task(Element):
         return dict(_keys())
 
     def __repr__(self):
-        return '<Task: %s(%s) %s>' % (self.name, self.uuid, self.state)
+        return '<Task: {0.name}({0.uuid}) {0.state}>'.format(self)
 
     @property
     def ready(self):
@@ -273,7 +275,7 @@ class State(object):
         hostname = fields.pop('hostname', None)
         if hostname:
             worker = self.get_or_create_worker(hostname)
-            handler = getattr(worker, 'on_%s' % type, None)
+            handler = getattr(worker, 'on_' + type, None)
             if handler:
                 handler(**fields)
 
@@ -283,7 +285,7 @@ class State(object):
         hostname = fields.pop('hostname')
         worker = self.get_or_create_worker(hostname)
         task = self.get_or_create_task(uuid)
-        handler = getattr(task, 'on_%s' % type, None)
+        handler = getattr(task, 'on_' + type, None)
         if type == 'received':
             self.task_count += 1
         if handler:
@@ -352,8 +354,8 @@ class State(object):
         return [w for w in self.workers.values() if w.alive]
 
     def __repr__(self):
-        return '<ClusterState: events=%s tasks=%s>' % (self.event_count,
-                                                       self.task_count)
+        return '<State: events={0.event_count} tasks={0.task_count}>' \
+                    .format(self)
 
 
 state = State()

+ 6 - 6
celery/exceptions.py

@@ -13,7 +13,7 @@ from billiard.exceptions import (  # noqa
 )
 
 UNREGISTERED_FMT = """\
-Task of kind %s is not registered, please make sure it's imported.\
+Task of kind {0} is not registered, please make sure it's imported.\
 """
 
 
@@ -41,7 +41,7 @@ class NotRegistered(KeyError):
     """The task is not registered."""
 
     def __repr__(self):
-        return UNREGISTERED_FMT % str(self)
+        return UNREGISTERED_FMT.format(self)
 
 
 class AlreadyRegistered(Exception):
@@ -71,15 +71,15 @@ class RetryTaskError(Exception):
 
     def humanize(self):
         if isinstance(self.when, int):
-            return 'in %ss' % self.when
-        return 'at %s' % (self.when, )
+            return 'in {0.when}s'.format(self)
+        return 'at {0.when}'.format(self)
 
     def __str__(self):
         if self.message:
             return self.message
         if self.excs:
-            return 'Retry %s: %r' % (self.humanize(), self.excs)
-        return 'Retry %s' % self.humanize()
+            return 'Retry {0}: {1!r}'.format(self.humanize(), self.excs)
+        return 'Retry {0}'.format(self.humanize())
 
     def __reduce__(self):
         return self.__class__, (self.message, self.excs, self.when)

+ 6 - 4
celery/loaders/base.py

@@ -13,6 +13,7 @@ import importlib
 import os
 import re
 
+from future_builtins import map
 from datetime import datetime
 
 from kombu.utils.encoding import safe_str
@@ -26,7 +27,7 @@ from celery.utils.functional import maybe_list
 BUILTIN_MODULES = frozenset()
 
 ERROR_ENVVAR_NOT_SET = (
-"""The environment variable %r is not set,
+"""The environment variable {0!r} is not set,
 and as such the configuration could not be loaded.
 Please set this variable and make it point to
 a configuration module.""")
@@ -126,7 +127,8 @@ class BaseLoader(object):
         if not module_name:
             if silent:
                 return False
-            raise ImproperlyConfigured(self.error_envvar_not_set % module_name)
+            raise ImproperlyConfigured(
+                    self.error_envvar_not_set.format(module_name))
         return self.config_from_object(module_name, silent=silent)
 
     def config_from_object(self, obj, silent=False):
@@ -184,9 +186,9 @@ class BaseLoader(object):
             else:
                 try:
                     value = NAMESPACES[ns][key].to_python(value)
-                except ValueError, exc:
+                except ValueError as exc:
                     # display key name in error message.
-                    raise ValueError('%r: %s' % (ns_key, exc))
+                    raise ValueError('{0!r}: {1}'.format(ns_key, exc))
             return ns_key, value
 
         return dict(map(getarg, args))

+ 9 - 9
celery/loaders/default.py

@@ -25,12 +25,12 @@ DEFAULT_CONFIG_MODULE = 'celeryconfig'
 C_WNOCONF = strtobool(os.environ.get('C_WNOCONF', False))
 
 CONFIG_INVALID_NAME = """
-Error: Module '%(module)s' doesn't exist, or it's not a valid \
+Error: Module '{module}' doesn't exist, or it's not a valid \
 Python module name.
 """
 
 CONFIG_WITH_SUFFIX = CONFIG_INVALID_NAME + """
-Did you mean '%(suggest)s'?
+Did you mean '{suggest}'?
 """
 
 
@@ -53,18 +53,18 @@ class Loader(BaseLoader):
         except NotAPackage:
             if configname.endswith('.py'):
                 raise NotAPackage, NotAPackage(
-                        CONFIG_WITH_SUFFIX % {
-                            'module': configname,
-                            'suggest': configname[:-3]}), sys.exc_info()[2]
+                        CONFIG_WITH_SUFFIX.format(
+                            module=configname,
+                            suggest=configname[:-3])), sys.exc_info()[2]
             raise NotAPackage, NotAPackage(
-                    CONFIG_INVALID_NAME % {
-                        'module': configname}), sys.exc_info()[2]
+                    CONFIG_INVALID_NAME.format(
+                        module=configname)), sys.exc_info()[2]
         except ImportError:
             # billiard sets this if forked using execv
             if C_WNOCONF and not os.environ.get('FORKED_BY_MULTIPROCESSING'):
                 warnings.warn(NotConfigured(
-                    'No %r module found! Please make sure it exists and '
-                    'is available to Python.' % (configname, )))
+                    'No {module} module found! Please make sure it exists and '
+                    'is available to Python.'.format(module=configname)))
             return self.setup_settings({})
         else:
             celeryconfig = self.import_from_cwd(configname)

+ 4 - 6
celery/local.py

@@ -82,7 +82,7 @@ class Proxy(object):
         try:
             return getattr(self.__local, self.__name__)
         except AttributeError:
-            raise RuntimeError('no object bound to %s' % self.__name__)
+            raise RuntimeError('no object bound to {0.__name__}'.format(self))
 
     @property
     def __dict__(self):
@@ -95,7 +95,7 @@ class Proxy(object):
         try:
             obj = self._get_current_object()
         except RuntimeError:  # pragma: no cover
-            return '<%s unbound>' % self.__class__.__name__
+            return '<{0} unbound>'.format(self.__class__.__name__)
         return repr(obj)
 
     def __nonzero__(self):
@@ -425,7 +425,5 @@ class LocalManager(object):
             release_local(local)
 
     def __repr__(self):
-        return '<%s storages: %d>' % (
-            self.__class__.__name__,
-            len(self.locals)
-        )
+        return '<{0} storages: {1}>'.format(
+            self.__class__.__name__, len(self.locals))

+ 27 - 34
celery/platforms.py

@@ -7,8 +7,7 @@
     users, groups, and so on.
 
 """
-from __future__ import absolute_import
-from __future__ import with_statement
+from __future__ import absolute_import, print_function
 
 import atexit
 import errno
@@ -19,6 +18,7 @@ import signal as _signal
 import sys
 
 from contextlib import contextmanager
+from future_builtins import map
 
 from .local import try_import
 
@@ -49,8 +49,8 @@ PIDFILE_MODE = ((os.R_OK | os.W_OK) << 6) | ((os.R_OK) << 3) | ((os.R_OK))
 
 _setps_bucket = TokenBucket(0.5)  # 30/m, every 2 seconds
 
-PIDLOCKED = """ERROR: Pidfile (%s) already exists.
-Seems we're already running? (PID: %s)"""
+PIDLOCKED = """ERROR: Pidfile ({0}) already exists.
+Seems we're already running? (PID: {1})"""
 
 
 def pyimplementation():
@@ -106,7 +106,7 @@ class PIDFile(object):
         """Acquire lock."""
         try:
             self.write_pid()
-        except OSError, exc:
+        except OSError as exc:
             raise LockFailed, LockFailed(str(exc)), sys.exc_info()[2]
         return self
     __enter__ = acquire
@@ -124,7 +124,7 @@ class PIDFile(object):
         """Reads and returns the current pid."""
         try:
             fh = open(self.path, 'r')
-        except IOError, exc:
+        except IOError as exc:
             if exc.errno == errno.ENOENT:
                 return
             raise
@@ -133,20 +133,20 @@ class PIDFile(object):
             line = fh.readline()
             if line.strip() == line:  # must contain '\n'
                 raise ValueError(
-                    'Partially written or invalid pidfile %r' % (self.path))
+                    'Partial or invalid pidfile {0.path}'.format(self))
         finally:
             fh.close()
 
         try:
             return int(line.strip())
         except ValueError:
-            raise ValueError('PID file %r contents invalid.' % self.path)
+            raise ValueError('PID file {0.path} invalid.'.format(self))
 
     def remove(self):
         """Removes the lock."""
         try:
             os.unlink(self.path)
-        except OSError, exc:
+        except OSError as exc:
             if exc.errno in (errno.ENOENT, errno.EACCES):
                 return
             raise
@@ -156,8 +156,8 @@ class PIDFile(object):
         (does not respond to signals)."""
         try:
             pid = self.read_pid()
-        except ValueError, exc:
-            sys.stderr.write('Broken pidfile found. Removing it.\n')
+        except ValueError as exc:
+            print('Broken pidfile found. Removing it.', file=sys.stderr)
             self.remove()
             return True
         if not pid:
@@ -166,16 +166,16 @@ class PIDFile(object):
 
         try:
             os.kill(pid, 0)
-        except os.error, exc:
+        except os.error as exc:
             if exc.errno == errno.ESRCH:
-                sys.stderr.write('Stale pidfile exists. Removing it.\n')
+                print('Stale pidfile exists. Removing it.', file=sys.stderr)
                 self.remove()
                 return True
         return False
 
     def write_pid(self):
         pid = os.getpid()
-        content = '%d\n' % (pid, )
+        content = '{0}\n'.format(pid)
 
         pidfile_fd = os.open(self.path, PIDFILE_FLAGS, PIDFILE_MODE)
         pidfile = os.fdopen(pidfile_fd, 'w')
@@ -220,7 +220,7 @@ def create_pidlock(pidfile):
     """
     pidlock = PIDFile(pidfile)
     if pidlock.is_locked() and not pidlock.remove_if_stale():
-        raise SystemExit(PIDLOCKED % (pidfile, pidlock.read_pid()))
+        raise SystemExit(PIDLOCKED.format(pidfile, pidlock.read_pid()))
     pidlock.acquire()
     atexit.register(pidlock.release)
     return pidlock
@@ -245,10 +245,7 @@ class DaemonContext(object):
             os.chdir(self.workdir)
             os.umask(self.umask)
 
-            for fd in reversed(range(get_fdmax(default=2048))):
-                with ignore_EBADF():
-                    os.close(fd)
-
+            os.closerange(0, get_fdmax(default=2048))
             os.open(DAEMON_REDIRECT_TO, os.O_RDWR)
             os.dup2(0, 1)
             os.dup2(0, 2)
@@ -340,7 +337,7 @@ def parse_uid(uid):
         try:
             return pwd.getpwnam(uid).pw_uid
         except (AttributeError, KeyError):
-            raise KeyError('User does not exist: %r' % (uid, ))
+            raise KeyError('User does not exist: {0}'.format(uid))
 
 
 def parse_gid(gid):
@@ -356,7 +353,7 @@ def parse_gid(gid):
         try:
             return grp.getgrnam(gid).gr_gid
         except (AttributeError, KeyError):
-            raise KeyError('Group does not exist: %r' % (gid, ))
+            raise KeyError('Group does not exist: {0}'.format(gid))
 
 
 def _setgroups_hack(groups):
@@ -372,7 +369,7 @@ def _setgroups_hack(groups):
             if len(groups) <= 1:
                 raise
             groups[:] = groups[:-1]
-        except OSError, exc:  # error from the OS.
+        except OSError as exc:  # error from the OS.
             if exc.errno != errno.EINVAL or len(groups) <= 1:
                 raise
             groups[:] = groups[:-1]
@@ -386,7 +383,7 @@ def setgroups(groups):
         pass
     try:
         return _setgroups_hack(groups[:max_groups])
-    except OSError, exc:
+    except OSError as exc:
         if exc.errno != errno.EPERM:
             raise
         if any(group not in groups for group in os.getgroups()):
@@ -573,8 +570,8 @@ def set_process_title(progname, info=None):
     Only works if :mod:`setproctitle` is installed.
 
     """
-    proctitle = '[%s]' % progname
-    proctitle = '%s %s' % (proctitle, info) if info else proctitle
+    proctitle = '[{0}]'.format(progname)
+    proctitle = '{0} {1}'.format(proctitle, info) if info else proctitle
     if _setproctitle:
         _setproctitle.setproctitle(proctitle)
     return proctitle
@@ -595,23 +592,19 @@ else:
         """
         if not rate_limit or _setps_bucket.can_consume(1):
             if hostname:
-                progname = '%s@%s' % (progname, hostname.split('.')[0])
+                progname = '{0}@{1}'.format(progname, hostname.split('.')[0])
             return set_process_title(
-                '%s:%s' % (progname, current_process().name), info=info)
+                '{0}:{1}'.format(progname, current_process().name), info=info)
 
 
-def shellsplit(s, posix=True):
-    # posix= option to shlex.split first available in Python 2.6+
-    lexer = shlex.shlex(s, posix=not IS_WINDOWS)
-    lexer.whitespace_split = True
-    lexer.commenters = ''
-    return list(lexer)
+def shellsplit(s):
+    return shlex.split(s, posix=not IS_WINDOWS)
 
 
 @contextmanager
 def ignore_EBADF():
     try:
         yield
-    except OSError, exc:
+    except OSError as exc:
         if exc.errno != errno.EBADF:
             raise

+ 21 - 18
celery/result.py

@@ -7,13 +7,12 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import time
 
 from collections import deque
 from copy import copy
-from itertools import imap
+from future_builtins import map
 
 from . import current_app
 from . import states
@@ -198,7 +197,7 @@ class AsyncResult(ResultBase):
         return hash(self.id)
 
     def __repr__(self):
-        return '<%s: %s>' % (self.__class__.__name__, self.id)
+        return '<{0}: {1}>'.format(type(self).__name__, self.id)
 
     def __eq__(self, other):
         if isinstance(other, AsyncResult):
@@ -229,7 +228,7 @@ class AsyncResult(ResultBase):
     def children(self):
         children = self.backend.get_children(self.id)
         if children:
-            return map(from_serializable, children)
+            return [from_serializable(child) for child in children]
 
     @property
     def result(self):
@@ -277,12 +276,14 @@ class AsyncResult(ResultBase):
         return self.backend.get_status(self.id)
     status = state
 
-    def _get_task_id(self):
+    @property
+    def task_id(self):
+        """compat alias to :attr:`id`"""
         return self.id
 
-    def _set_task_id(self, id):
+    @task_id.setter  # noqa
+    def task_id(self, id):
         self.id = id
-    task_id = property(_get_task_id, _set_task_id)
 BaseAsyncResult = AsyncResult  # for backwards compatibility.
 
 
@@ -385,7 +386,7 @@ class ResultSet(ResultBase):
         :returns: the number of tasks completed.
 
         """
-        return sum(imap(int, (result.successful() for result in self.results)))
+        return sum(map(int, (result.successful() for result in self.results)))
 
     def forget(self):
         """Forget about (and possible remove the result of) all the tasks."""
@@ -394,7 +395,7 @@ class ResultSet(ResultBase):
 
     def revoke(self, connection=None):
         """Revoke all tasks in the set."""
-        with self.app.default_connection(connection) as conn:
+        with self.app.connection_or_acquire(connection) as conn:
             for result in self.results:
                 result.revoke(connection=conn)
 
@@ -431,7 +432,7 @@ class ResultSet(ResultBase):
             time.sleep(interval)
             elapsed += interval
             if timeout and elapsed >= timeout:
-                raise TimeoutError("The operation timed out")
+                raise TimeoutError('The operation timed out')
 
     def get(self, timeout=None, propagate=True, interval=0.5):
         """See :meth:`join`
@@ -535,8 +536,8 @@ class ResultSet(ResultBase):
         return NotImplemented
 
     def __repr__(self):
-        return '<%s: [%s]>' % (self.__class__.__name__,
-                               ', '.join(r.id for r in self.results))
+        return '<{0}: [{1}]>'.format(type(self).__name__,
+                                     ', '.join(r.id for r in self.results))
 
     @property
     def subtasks(self):
@@ -599,8 +600,8 @@ class GroupResult(ResultSet):
         return NotImplemented
 
     def __repr__(self):
-        return '<%s: %s [%s]>' % (self.__class__.__name__, self.id,
-                                  ', '.join(r.id for r in self.results))
+        return '<{0}: {1} [{2}]>'.format(type(self).__name__, self.id,
+                                         ', '.join(r.id for r in self.results))
 
     def serializable(self):
         return self.id, [r.serializable() for r in self.results]
@@ -630,12 +631,14 @@ class TaskSetResult(GroupResult):
         """Deprecated: Use ``len(r)``."""
         return len(self)
 
-    def _get_taskset_id(self):
+    @property
+    def taskset_id(self):
+        """compat alias to :attr:`self.id`"""
         return self.id
 
-    def _set_taskset_id(self, id):
+    @taskset_id.setter  # noqa
+    def taskset_id(self, id):
         self.id = id
-    taskset_id = property(_get_taskset_id, _set_taskset_id)
 
 
 class EagerResult(AsyncResult):
@@ -676,7 +679,7 @@ class EagerResult(AsyncResult):
         self._state = states.REVOKED
 
     def __repr__(self):
-        return "<EagerResult: %s>" % self.id
+        return '<EagerResult: {0.id}>'.format(self)
 
     @property
     def result(self):

+ 17 - 13
celery/schedules.py

@@ -21,6 +21,16 @@ from .utils.timeutils import (timedelta_seconds, weekday, maybe_timedelta,
                               timezone)
 from .datastructures import AttributeDict
 
+CRON_PATTERN_INVALID = """\
+Invalid crontab pattern. Valid range is {min}-{max}. \
+'{value}' was found.\
+"""
+
+CRON_INVALID_TYPE = """\
+Argument cronspec needs to be of any of the following types: \
+int, basestring, or an iterable type. {type!r} was given.\
+"""
+
 
 class ParseException(Exception):
     """Raised by crontab_parser when the input can't be parsed."""
@@ -80,7 +90,7 @@ class schedule(object):
         return False, rem
 
     def __repr__(self):
-        return '<freq: %s>' % self.human_seconds
+        return '<freq: {0.human_seconds}>'.format(self)
 
     def __eq__(self, other):
         if isinstance(other, schedule):
@@ -195,11 +205,11 @@ class crontab_parser(object):
             try:
                 i = weekday(s)
             except KeyError:
-                raise ValueError("Invalid weekday literal '%s'." % s)
+                raise ValueError('Invalid weekday literal {0!r}.'.format(s))
 
         if i < self.min_:
-            raise ValueError('Invalid beginning range: %s < %s.' %
-                                                   (i, self.min_))
+            raise ValueError(
+                'Invalid beginning range: {0} < {1}.'.format(i, self.min_))
         return i
 
 
@@ -304,19 +314,13 @@ class crontab(schedule):
         elif is_iterable(cronspec):
             result = set(cronspec)
         else:
-            raise TypeError(
-                    'Argument cronspec needs to be of any of the '
-                    'following types: int, basestring, or an iterable type. '
-                    "'%s' was given." % type(cronspec))
+            raise TypeError(CRON_INVALID_TYPE.format(type=type(cronspec)))
 
         # assure the result does not preceed the min or exceed the max
         for number in result:
             if number >= max_ + min_ or number < min_:
-                raise ValueError(
-                        'Invalid crontab pattern. Valid '
-                        "range is %d-%d. '%d' was found." %
-                        (min_, max_ - 1 + min_, number))
-
+                raise ValueError(CRON_PATTERN_INVALID.format(
+                    min=min_, max=max_ - 1 + min_, value=number))
         return result
 
     def _delta_to_next(self, last_run_at, next_hour, next_minute):

+ 0 - 1
celery/security/__init__.py

@@ -7,7 +7,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from kombu.serialization import registry
 

+ 6 - 7
celery/security/certificate.py

@@ -7,7 +7,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import glob
 import os
@@ -22,7 +21,7 @@ class Certificate(object):
 
     def __init__(self, cert):
         assert crypto is not None
-        with reraise_errors('Invalid certificate: %r'):
+        with reraise_errors('Invalid certificate: {0!r}'):
             self._cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
 
     def has_expired(self):
@@ -40,11 +39,11 @@ class Certificate(object):
 
     def get_id(self):
         """Serial number/issuer pair uniquely identifies a certificate"""
-        return '%s %s' % (self.get_issuer(), self.get_serial_number())
+        return '{0} {1}'.format(self.get_issuer(), self.get_serial_number())
 
     def verify(self, data, signature, digest):
         """Verifies the signature for string containing data."""
-        with reraise_errors('Bad signature: %r'):
+        with reraise_errors('Bad signature: {0!r}'):
             crypto.verify(self._cert, signature, data, digest)
 
 
@@ -64,11 +63,11 @@ class CertStore(object):
         try:
             return self._certs[id]
         except KeyError:
-            raise SecurityError('Unknown certificate: %r' % (id, ))
+            raise SecurityError('Unknown certificate: {0!r}'.format(id))
 
     def add_cert(self, cert):
         if cert.get_id() in self._certs:
-            raise SecurityError('Duplicate certificate: %r' % (id, ))
+            raise SecurityError('Duplicate certificate: {0!r}'.format(id))
         self._certs[cert.get_id()] = cert
 
 
@@ -84,5 +83,5 @@ class FSCertStore(CertStore):
                 cert = Certificate(f.read())
                 if cert.has_expired():
                     raise SecurityError(
-                        'Expired certificate: %r' % (cert.get_id(), ))
+                        'Expired certificate: {0!r}'.format(cert.get_id()))
                 self.add_cert(cert)

+ 2 - 3
celery/security/key.py

@@ -7,7 +7,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from .utils import crypto, reraise_errors
 
@@ -15,10 +14,10 @@ from .utils import crypto, reraise_errors
 class PrivateKey(object):
 
     def __init__(self, key):
-        with reraise_errors('Invalid private key: %r'):
+        with reraise_errors('Invalid private key: {0!r}'):
             self._key = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
 
     def sign(self, data, digest):
         """sign string containing data."""
-        with reraise_errors('Unable to sign data: %r'):
+        with reraise_errors('Unable to sign data: {0!r}'):
             return crypto.sign(self._key, data, digest)

+ 3 - 3
celery/security/serialization.py

@@ -7,10 +7,10 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import base64
 
+from future_builtins import zip
 from kombu.serialization import registry, encode, decode
 from kombu.utils.encoding import bytes_to_str, str_to_bytes
 
@@ -41,7 +41,7 @@ class SecureSerializer(object):
         """serialize data structure into string"""
         assert self._key is not None
         assert self._cert is not None
-        with reraise_errors('Unable to serialize: %r', (Exception, )):
+        with reraise_errors('Unable to serialize: {0!r}', (Exception, )):
             content_type, content_encoding, body = encode(
                     data, serializer=self._serializer)
             # What we sign is the serialized body, not the body itself.
@@ -55,7 +55,7 @@ class SecureSerializer(object):
     def deserialize(self, data):
         """deserialize data structure from string"""
         assert self._cert_store is not None
-        with reraise_errors('Unable to deserialize: %r', (Exception, )):
+        with reraise_errors('Unable to deserialize: {0!r}', (Exception, )):
             payload = self._unpack(data)
             signature, signer, body = (payload['signature'],
                                        payload['signer'],

+ 3 - 3
celery/security/utils.py

@@ -21,10 +21,10 @@ except ImportError:  # pragma: no cover
 
 
 @contextmanager
-def reraise_errors(msg='%r', errors=None):
+def reraise_errors(msg='{0!r}', errors=None):
     assert crypto is not None
     errors = (crypto.Error, ) if errors is None else errors
     try:
         yield
-    except errors, exc:
-        raise SecurityError, SecurityError(msg % (exc, )), sys.exc_info()[2]
+    except errors as exc:
+        raise SecurityError, SecurityError(msg.format(exc)), sys.exc_info()[2]

+ 6 - 9
celery/task/base.py

@@ -45,14 +45,12 @@ class Task(BaseTask):
     immediate = False
     priority = None
     type = 'regular'
-    error_whitelist = ()
     disable_error_emails = False
     accept_magic_kwargs = None  # get default from app
 
     from_config = BaseTask.from_config + (
         ('exchange_type', 'CELERY_DEFAULT_EXCHANGE_TYPE'),
         ('delivery_mode', 'CELERY_DEFAULT_DELIVERY_MODE'),
-        ('error_whitelist', 'CELERY_TASK_ERROR_WHITELIST'),
     )
 
     # In old Celery the @task decorator didn't exist, so one would create
@@ -73,7 +71,7 @@ class Task(BaseTask):
         return get_task_logger(self.name)
 
     @classmethod
-    def establish_connection(self, connect_timeout=None):
+    def establish_connection(self):
         """Deprecated method used to get a broker connection.
 
         Should be replaced with :meth:`@Celery.connection`
@@ -89,11 +87,10 @@ class Task(BaseTask):
             with celery.connection() as conn:
                 ...
         """
-        return self._get_app().connection(
-                connect_timeout=connect_timeout)
+        return self._get_app().connection()
 
     def get_publisher(self, connection=None, exchange=None,
-            connect_timeout=None, exchange_type=None, **options):
+            exchange_type=None, **options):
         """Deprecated method to get the task publisher (now called producer).
 
         Should be replaced with :class:`@amqp.TaskProducer`:
@@ -108,7 +105,7 @@ class Task(BaseTask):
         exchange = self.exchange if exchange is None else exchange
         if exchange_type is None:
             exchange_type = self.exchange_type
-        connection = connection or self.establish_connection(connect_timeout)
+        connection = connection or self.establish_connection()
         return self._get_app().amqp.TaskProducer(connection,
                 exchange=exchange and Exchange(exchange, exchange_type),
                 routing_key=self.routing_key, **options)
@@ -175,7 +172,7 @@ def task(*args, **kwargs):
         def refresh_feed(url):
             try:
                 return Feed.objects.get(url=url).refresh()
-            except socket.error, exc:
+            except socket.error as exc:
                 refresh_feed.retry(exc=exc)
 
     Calling the resulting task:
@@ -210,7 +207,7 @@ def periodic_task(*args, **options):
                 def refresh_feed(url):
                     try:
                         return Feed.objects.get(url=url).refresh()
-                    except socket.error, exc:
+                    except socket.error as exc:
                         current.retry(exc=exc)
 
             Calling the resulting task:

+ 7 - 7
celery/task/http.py

@@ -67,7 +67,7 @@ def extract_response(raw_response, loads=anyjson.loads):
         raise InvalidResponseError('Empty response')
     try:
         payload = loads(raw_response)
-    except ValueError, exc:
+    except ValueError as exc:
         raise InvalidResponseError, InvalidResponseError(
                 str(exc)), sys.exc_info()[2]
 
@@ -108,13 +108,13 @@ class MutableURL(object):
         scheme, netloc, path, params, query, fragment = self.parts
         query = urlencode(utf8dict(self.query.items()))
         components = [scheme + '://', netloc, path or '/',
-                      ';%s' % params   if params   else '',
-                      '?%s' % query    if query    else '',
-                      '#%s' % fragment if fragment else '']
+                      ';{0}'.format(params)   if params   else '',
+                      '?{0}'.format(query)    if query    else '',
+                      '#{0}'.format(fragment) if fragment else '']
         return ''.join(filter(None, components))
 
     def __repr__(self):
-        return '<%s: %s>' % (self.__class__.__name__, str(self))
+        return '<{0}: {1}>'.format(type(self).__name__, self)
 
 
 class HttpDispatch(object):
@@ -127,14 +127,14 @@ class HttpDispatch(object):
     :param logger: Logger used for user/system feedback.
 
     """
-    user_agent = 'celery/%s' % celery_version
+    user_agent = 'celery/{version}'.format(version=celery_version)
     timeout = 5
 
     def __init__(self, url, method, task_kwargs, **kwargs):
         self.url = url
         self.method = method
         self.task_kwargs = task_kwargs
-        self.logger = kwargs.get("logger") or logger
+        self.logger = kwargs.get('logger') or logger
 
     def make_request(self, url, method, params):
         """Makes an HTTP request and returns the response."""

+ 6 - 7
celery/task/sets.py

@@ -8,7 +8,6 @@
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from celery._state import get_current_worker_task
 from celery.app import app_or_default
@@ -38,15 +37,14 @@ class TaskSet(list):
         self.Publisher = Publisher or self.app.amqp.TaskProducer
         self.total = len(self)  # XXX compat
 
-    def apply_async(self, connection=None, connect_timeout=None,
-            publisher=None, taskset_id=None):
+    def apply_async(self, connection=None, publisher=None, taskset_id=None):
         """Apply TaskSet."""
         app = self.app
 
         if app.conf.CELERY_ALWAYS_EAGER:
             return self.apply(taskset_id=taskset_id)
 
-        with app.default_connection(connection, connect_timeout) as conn:
+        with app.connection_or_acquire(connection) as conn:
             setid = taskset_id or uuid()
             pub = publisher or self.Publisher(conn)
             results = self._async_results(setid, pub)
@@ -69,9 +67,10 @@ class TaskSet(list):
     def _sync_results(self, taskset_id):
         return [task.apply(taskset_id=taskset_id) for task in self]
 
-    def _get_tasks(self):
+    @property
+    def tasks(self):
         return self
 
-    def _set_tasks(self, tasks):
+    @tasks.setter  # noqa
+    def tasks(self, tasks):
         self[:] = tasks
-    tasks = property(_get_tasks, _set_tasks)

+ 7 - 17
celery/task/trace.py

@@ -211,11 +211,11 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
                 try:
                     R = retval = fun(*args, **kwargs)
                     state = SUCCESS
-                except RetryTaskError, exc:
+                except RetryTaskError as exc:
                     I = Info(RETRY, exc)
                     state, retval = I.state, I.retval
                     R = I.handle_error_state(task, eager=eager)
-                except Exception, exc:
+                except Exception as exc:
                     if propagate:
                         raise
                     I = Info(FAILURE, exc)
@@ -223,18 +223,8 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
                     R = I.handle_error_state(task, eager=eager)
                     [subtask(errback).apply_async((uuid, ))
                         for errback in task_request.errbacks or []]
-                except BaseException, exc:
+                except BaseException as exc:
                     raise
-                except:  # pragma: no cover
-                    # For Python2.5 where raising strings are still allowed
-                    # (but deprecated)
-                    if propagate:
-                        raise
-                    I = Info(FAILURE, None)
-                    state, retval = I.state, I.retval
-                    R = I.handle_error_state(task, eager=eager)
-                    [subtask(errback).apply_async((uuid, ))
-                        for errback in task_request.errbacks or []]
                 else:
                     # callback tasks must be applied before the result is
                     # stored, so that result.children is populated.
@@ -265,10 +255,10 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
                         loader_cleanup()
                     except (KeyboardInterrupt, SystemExit, MemoryError):
                         raise
-                    except Exception, exc:
+                    except Exception as exc:
                         _logger.error('Process cleanup failed: %r', exc,
                                       exc_info=True)
-        except Exception, exc:
+        except Exception as exc:
             if eager:
                 raise
             R = report_internal_error(task, exc)
@@ -282,7 +272,7 @@ def trace_task(task, uuid, args, kwargs, request={}, **opts):
         if task.__trace__ is None:
             task.__trace__ = build_tracer(task.name, task, **opts)
         return task.__trace__(uuid, args, kwargs, request)[0]
-    except Exception, exc:
+    except Exception as exc:
         return report_internal_error(task, exc)
 
 
@@ -302,7 +292,7 @@ def report_internal_error(task, exc):
         _value = task.backend.prepare_exception(exc)
         exc_info = ExceptionInfo((_type, _value, _tb), internal=True)
         warn(RuntimeWarning(
-            'Exception raised outside body: %r:\n%s' % (
+            'Exception raised outside body: {0!r}:\n{1}'.format(
                 exc, exc_info.traceback)))
         return exc_info
     finally:

+ 1 - 3
celery/tests/__init__.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import logging
 import os
@@ -80,7 +79,6 @@ def import_all_modules(name=__name__, file=__file__,
 
 
 if os.environ.get('COVER_ALL_MODULES') or '--with-coverage3' in sys.argv:
-    from celery.tests.utils import catch_warnings
-    with catch_warnings(record=True):
+    with warnings.catch_warnings(record=True):
         import_all_modules()
     warnings.resetwarnings()

+ 0 - 1
celery/tests/app/test_amqp.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from kombu import Exchange, Queue
 from mock import Mock

+ 0 - 4
celery/tests/app/test_app.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import os
 
@@ -306,7 +305,6 @@ class test_App(Case):
     def test_config_from_cmdline(self):
         cmdline = ['.always_eager=no',
                    '.result_backend=/dev/null',
-                   '.task_error_whitelist=(list)["a", "b", "c"]',
                    'celeryd.prefetch_multiplier=368',
                    '.foobarstring=(string)300',
                    '.foobarint=(int)300',
@@ -315,8 +313,6 @@ class test_App(Case):
         self.assertFalse(self.app.conf.CELERY_ALWAYS_EAGER)
         self.assertEqual(self.app.conf.CELERY_RESULT_BACKEND, '/dev/null')
         self.assertEqual(self.app.conf.CELERYD_PREFETCH_MULTIPLIER, 368)
-        self.assertListEqual(self.app.conf.CELERY_TASK_ERROR_WHITELIST,
-                             ['a', 'b', 'c'])
         self.assertEqual(self.app.conf.CELERY_FOOBARSTRING, '300')
         self.assertEqual(self.app.conf.CELERY_FOOBARINT, 300)
         self.assertDictEqual(self.app.conf.CELERY_RESULT_ENGINE_OPTIONS,

+ 5 - 6
celery/tests/app/test_beat.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import errno
 
@@ -62,9 +61,9 @@ class test_ScheduleEntry(Case):
         self.assertEqual(entry.total_run_count, 0)
 
         next_run_at = entry.last_run_at + timedelta(seconds=10)
-        next = entry.next(next_run_at)
-        self.assertGreaterEqual(next.last_run_at, next_run_at)
-        self.assertEqual(next.total_run_count, 1)
+        next_entry = entry.next(next_run_at)
+        self.assertGreaterEqual(next_entry.last_run_at, next_run_at)
+        self.assertEqual(next_entry.total_run_count, 1)
 
     def test_is_due(self):
         entry = self.create_entry(schedule=timedelta(seconds=10))
@@ -73,8 +72,8 @@ class test_ScheduleEntry(Case):
         self.assertGreater(next_time_to_run1, 9)
 
         next_run_at = entry.last_run_at - timedelta(seconds=10)
-        next = entry.next(next_run_at)
-        due2, next_time_to_run2 = next.is_due()
+        next_entry = entry.next(next_run_at)
+        due2, next_time_to_run2 = next_entry.is_due()
         self.assertTrue(due2)
         self.assertGreater(next_time_to_run2, 9)
 

+ 0 - 1
celery/tests/app/test_control.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from functools import wraps
 

+ 1 - 2
celery/tests/app/test_defaults.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import sys
 
@@ -34,7 +33,7 @@ class test_defaults(Case):
 
     def test_deprecated(self):
         source = Mock()
-        source.BROKER_INSIST = True
+        source.CELERYD_LOG_LEVEL = 2
         with patch('celery.utils.warn_deprecated') as warn:
             self.defaults.find_deprecated_settings(source)
             self.assertTrue(warn.called)

+ 2 - 3
celery/tests/app/test_loaders.py

@@ -1,8 +1,8 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import os
 import sys
+import warnings
 
 from mock import Mock, patch
 
@@ -20,7 +20,6 @@ from celery.utils.imports import NotAPackage
 from celery.utils.mail import SendmailWarning
 
 from celery.tests.utils import AppCase, Case
-from celery.tests.compat import catch_warnings
 
 
 class ObjectConfig(object):
@@ -237,7 +236,7 @@ class test_DefaultLoader(Case):
             def find_module(self, name):
                 raise ImportError(name)
 
-        with catch_warnings(record=True):
+        with warnings.catch_warnings(record=True):
             l = _Loader()
             self.assertDictEqual(l.conf, {})
             context_executed[0] = True

+ 0 - 1
celery/tests/app/test_log.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import sys
 import logging

+ 1 - 2
celery/tests/app/test_routes.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from functools import wraps
 
@@ -13,7 +12,7 @@ from celery.tests.utils import Case
 
 
 def Router(*args, **kwargs):
-    return routes.Router(*args, **dict(kwargs, app=current_app))
+    return routes.Router(*args, app=current_app, **kwargs)
 
 
 @task()

+ 6 - 17
celery/tests/backends/test_amqp.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import socket
 
@@ -64,7 +63,7 @@ class test_AMQPBackend(AppCase):
         tid3 = uuid()
         try:
             raise KeyError('foo')
-        except KeyError, exception:
+        except KeyError as exception:
             einfo = ExceptionInfo()
             tb1.mark_as_failure(tid3, exception, traceback=einfo.traceback)
             self.assertEqual(tb2.get_status(tid3), states.FAILURE)
@@ -77,16 +76,6 @@ class test_AMQPBackend(AppCase):
             tid = uuid()
             self.assertEqual(repair_uuid(tid.replace('-', '')), tid)
 
-    def test_expires_defaults_to_config_deprecated_setting(self):
-        app = app_or_default()
-        prev = app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES
-        app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = 10
-        try:
-            b = self.create_backend()
-            self.assertEqual(b.queue_arguments.get('x-expires'), 10 * 1000.0)
-        finally:
-            app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = prev
-
     def test_expires_is_int(self):
         b = self.create_backend(expires=48)
         self.assertEqual(b.queue_arguments.get('x-expires'), 48 * 1000.0)
@@ -254,7 +243,7 @@ class test_AMQPBackend(AppCase):
 
         b = Backend()
         with self.assertRaises(KeyError):
-            b.get_many(['id1']).next()
+            next(b.get_many(['id1']))
 
     def test_test_get_many_raises_inner_block(self):
 
@@ -265,19 +254,19 @@ class test_AMQPBackend(AppCase):
 
         b = Backend()
         with self.assertRaises(KeyError):
-            b.get_many(['id1']).next()
+            next(b.get_many(['id1']))
 
     def test_no_expires(self):
         b = self.create_backend(expires=None)
         app = app_or_default()
-        prev = app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES
-        app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = None
+        prev = app.conf.CELERY_TASK_RESULT_EXPIRES
+        app.conf.CELERY_TASK_RESULT_EXPIRES = None
         try:
             b = self.create_backend(expires=None)
             with self.assertRaises(KeyError):
                 b.queue_arguments['x-expires']
         finally:
-            app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = prev
+            app.conf.CELERY_TASK_RESULT_EXPIRES = prev
 
     def test_process_cleanup(self):
         self.create_backend().process_cleanup()

+ 0 - 1
celery/tests/backends/test_backends.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from mock import patch
 

+ 0 - 1
celery/tests/backends/test_base.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import sys
 import types

+ 1 - 2
celery/tests/backends/test_cache.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import sys
 import types
@@ -51,7 +50,7 @@ class test_CacheBackend(Case):
     def test_mark_as_failure(self):
         try:
             raise KeyError('foo')
-        except KeyError, exception:
+        except KeyError as exception:
             self.tb.mark_as_failure(self.tid, exception)
             self.assertEqual(self.tb.get_status(self.tid), states.FAILURE)
             self.assertIsInstance(self.tb.get_result(self.tid), KeyError)

+ 0 - 1
celery/tests/backends/test_cassandra.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import socket
 

+ 2 - 22
celery/tests/backends/test_database.py

@@ -1,7 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
-
-import sys
 
 from datetime import datetime
 
@@ -50,23 +47,6 @@ class test_DatabaseBackend(Case):
             with self.assertRaises(ImproperlyConfigured):
                 _sqlalchemy_installed()
 
-    def test_pickle_hack_for_sqla_05(self):
-        import sqlalchemy as sa
-        from celery.backends.database import session
-        prev_base = session.ResultModelBase
-        prev_ver, sa.__version__ = sa.__version__, '0.5.0'
-        prev_models = sys.modules.pop('celery.backends.database.models', None)
-        try:
-            from sqlalchemy.ext.declarative import declarative_base
-            session.ResultModelBase = declarative_base()
-            from celery.backends.database.dfd042c7 import PickleType as Type1
-            from celery.backends.database.models import PickleType as Type2
-            self.assertIs(Type1, Type2)
-        finally:
-            sys.modules['celery.backends.database.models'] = prev_models
-            sa.__version__ = prev_ver
-            session.ResultModelBase = prev_base
-
     def test_missing_dburi_raises_ImproperlyConfigured(self):
         conf = app_or_default().conf
         prev, conf.CELERY_RESULT_DBURI = conf.CELERY_RESULT_DBURI, None
@@ -129,7 +109,7 @@ class test_DatabaseBackend(Case):
         tid = uuid()
         try:
             raise KeyError('foo')
-        except KeyError, exception:
+        except KeyError as exception:
             import traceback
             trace = '\n'.join(traceback.format_stack())
             tb.mark_as_retry(tid, exception, traceback=trace)
@@ -143,7 +123,7 @@ class test_DatabaseBackend(Case):
         tid3 = uuid()
         try:
             raise KeyError('foo')
-        except KeyError, exception:
+        except KeyError as exception:
             import traceback
             trace = '\n'.join(traceback.format_stack())
             tb.mark_as_failure(tid3, exception, traceback=trace)

+ 0 - 1
celery/tests/backends/test_mongodb.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import datetime
 import uuid

+ 0 - 1
celery/tests/backends/test_redis.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from datetime import timedelta
 

+ 0 - 2
celery/tests/bin/test_base.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import os
 
@@ -48,7 +47,6 @@ class test_Command(AppCase):
         cmd = Command()
         with self.assertRaises(SystemExit):
             cmd.early_version(['--version'])
-        stdout.write.assert_called_with(cmd.version + '\n')
 
     def test_execute_from_commandline(self):
         cmd = MockCommand()

+ 0 - 1
celery/tests/bin/test_camqadm.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from mock import Mock, patch
 

+ 0 - 3
celery/tests/bin/test_celery.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from anyjson import dumps
 from datetime import datetime
@@ -61,8 +60,6 @@ class test_Command(AppCase):
     def test_out(self):
         f = Mock()
         self.cmd.out('foo', f)
-        f.write.assert_called_with('foo\n')
-        self.cmd.out('foo\n', f)
 
     def test_call(self):
         self.cmd.run = Mock()

+ 0 - 1
celery/tests/bin/test_celerybeat.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import logging
 import sys

+ 3 - 7
celery/tests/bin/test_celeryd.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import logging
 import os
@@ -588,12 +587,9 @@ class test_signal_handlers(AppCase):
     @skip_if_pypy
     @skip_if_jython
     def test_worker_cry_handler(self, stderr):
-        if sys.version_info > (2, 5):
-            handlers = self.psig(cd.install_cry_handler)
-            self.assertIsNone(handlers['SIGUSR1']('SIGUSR1', object()))
-            self.assertTrue(stderr.write.called)
-        else:
-            raise SkipTest('Needs Python 2.5 or later')
+        handlers = self.psig(cd.install_cry_handler)
+        self.assertIsNone(handlers['SIGUSR1']('SIGUSR1', object()))
+        self.assertTrue(stderr.write.called)
 
     @disable_stdouts
     def test_worker_term_handler_only_stop_MainProcess(self):

+ 0 - 1
celery/tests/bin/test_celeryd_detach.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from mock import Mock, patch
 

+ 1 - 2
celery/tests/bin/test_celeryd_multi.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import errno
 import signal
@@ -188,7 +187,7 @@ class test_MultiTool(Case):
         pipe.wait.return_value = 2
         self.assertEqual(self.t.waitexec(['-m', 'foo'], 'path'), 2)
         self.t.note.assert_called_with(
-                '* Child terminated with failure code 2')
+                '* Child terminated with errorcode 2')
 
         pipe.wait.return_value = 0
         self.assertFalse(self.t.waitexec(['-m', 'foo', 'path']))

+ 0 - 1
celery/tests/bin/test_celeryev.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from nose import SkipTest
 from mock import patch as mpatch

+ 0 - 85
celery/tests/compat.py

@@ -1,85 +0,0 @@
-from __future__ import absolute_import
-
-import sys
-
-
-class WarningMessage(object):
-
-    """Holds the result of a single showwarning() call."""
-
-    _WARNING_DETAILS = ('message', 'category', 'filename', 'lineno', 'file',
-                        'line')
-
-    def __init__(self, message, category, filename, lineno, file=None,
-                    line=None):
-        local_values = locals()
-        for attr in self._WARNING_DETAILS:
-            setattr(self, attr, local_values[attr])
-
-        self._category_name = category and category.__name__ or None
-
-    def __str__(self):
-        return ('{message : %r, category : %r, filename : %r, lineno : %s, '
-                    'line : %r}' % (self.message, self._category_name,
-                                    self.filename, self.lineno, self.line))
-
-
-class catch_warnings(object):
-
-    """A context manager that copies and restores the warnings filter upon
-    exiting the context.
-
-    The 'record' argument specifies whether warnings should be captured by a
-    custom implementation of warnings.showwarning() and be appended to a list
-    returned by the context manager. Otherwise None is returned by the context
-    manager. The objects appended to the list are arguments whose attributes
-    mirror the arguments to showwarning().
-
-    The 'module' argument is to specify an alternative module to the module
-    named 'warnings' and imported under that name. This argument is only
-    useful when testing the warnings module itself.
-
-    """
-
-    def __init__(self, record=False, module=None):
-        """Specify whether to record warnings and if an alternative module
-        should be used other than sys.modules['warnings'].
-
-        For compatibility with Python 3.0, please consider all arguments to be
-        keyword-only.
-
-        """
-        self._record = record
-        self._module = module is None and sys.modules['warnings'] or module
-        self._entered = False
-
-    def __repr__(self):
-        args = []
-        if self._record:
-            args.append('record=True')
-        if self._module is not sys.modules['warnings']:
-            args.append('module=%r' % self._module)
-        name = type(self).__name__
-        return '%s(%s)' % (name, ', '.join(args))
-
-    def __enter__(self):
-        if self._entered:
-            raise RuntimeError('Cannot enter %r twice' % self)
-        self._entered = True
-        self._filters = self._module.filters
-        self._module.filters = self._filters[:]
-        self._showwarning = self._module.showwarning
-        if self._record:
-            log = []
-
-            def showwarning(*args, **kwargs):
-                log.append(WarningMessage(*args, **kwargs))
-
-            self._module.showwarning = showwarning
-            return log
-
-    def __exit__(self, *exc_info):
-        if not self._entered:
-            raise RuntimeError('Cannot exit %r without entering first' % self)
-        self._module.filters = self._filters
-        self._module.showwarning = self._showwarning

+ 3 - 3
celery/tests/compat_modules/test_decorators.py

@@ -1,9 +1,9 @@
 from __future__ import absolute_import
-from __future__ import with_statement
+
+import warnings
 
 from celery.task import base
 
-from celery.tests.compat import catch_warnings
 from celery.tests.utils import Case
 
 
@@ -14,7 +14,7 @@ def add(x, y):
 class test_decorators(Case):
 
     def setUp(self):
-        with catch_warnings(record=True):
+        with warnings.catch_warnings(record=True):
             from celery import decorators
             self.decorators = decorators
 

+ 0 - 1
celery/tests/concurrency/test_concurrency.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import os
 

+ 0 - 1
celery/tests/concurrency/test_eventlet.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import os
 import sys

+ 0 - 1
celery/tests/concurrency/test_gevent.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import os
 import sys

+ 0 - 1
celery/tests/concurrency/test_processes.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import signal
 import time

+ 0 - 1
celery/tests/concurrency/test_threads.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from mock import Mock
 

+ 0 - 1
celery/tests/contrib/test_migrate.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from kombu import Connection, Producer, Queue, Exchange
 from kombu.exceptions import StdChannelError

+ 0 - 1
celery/tests/contrib/test_rdb.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import errno
 import socket

+ 2 - 6
celery/tests/events/test_events.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import socket
 
@@ -164,12 +163,9 @@ class test_EventReceiver(AppCase):
         try:
             r = self.app.events.Receiver(connection, node_id='celery.tests')
             it = r.itercapture(timeout=0.0001, wakeup=False)
-            consumer = it.next()
-            self.assertTrue(consumer.queues)
-            self.assertEqual(consumer.callbacks[0], r._receive)
 
             with self.assertRaises(socket.timeout):
-                it.next()
+                next(it)
 
             with self.assertRaises(socket.timeout):
                 r.capture(timeout=0.00001)
@@ -195,7 +191,7 @@ class test_EventReceiver(AppCase):
             for ev in evs:
                 producer.send(ev)
             it = r.itercapture(limit=4, wakeup=True)
-            it.next()  # skip consumer (see itercapture)
+            next(it)  # skip consumer (see itercapture)
             list(it)
             self.assertEqual(events_received[0], 4)
         finally:

+ 0 - 1
celery/tests/events/test_snapshot.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from mock import patch
 

+ 8 - 8
celery/tests/events/test_state.py

@@ -167,7 +167,7 @@ class test_State(Case):
 
     def test_worker_online_offline(self):
         r = ev_worker_online_offline(State())
-        r.next()
+        next(r)
         self.assertTrue(r.state.alive_workers())
         self.assertTrue(r.state.workers['utest1'].alive)
         r.play()
@@ -181,7 +181,7 @@ class test_State(Case):
 
     def test_worker_heartbeat_expire(self):
         r = ev_worker_heartbeats(State())
-        r.next()
+        next(r)
         self.assertFalse(r.state.alive_workers())
         self.assertFalse(r.state.workers['utest1'].alive)
         r.play()
@@ -192,7 +192,7 @@ class test_State(Case):
         r = ev_task_states(State())
 
         # RECEIVED
-        r.next()
+        next(r)
         self.assertTrue(r.tid in r.state.tasks)
         task = r.state.tasks[r.tid]
         self.assertEqual(task.state, states.RECEIVED)
@@ -201,7 +201,7 @@ class test_State(Case):
         self.assertEqual(task.worker.hostname, 'utest1')
 
         # STARTED
-        r.next()
+        next(r)
         self.assertTrue(r.state.workers['utest1'].alive,
                 'any task event adds worker heartbeat')
         self.assertEqual(task.state, states.STARTED)
@@ -210,14 +210,14 @@ class test_State(Case):
         self.assertEqual(task.worker.hostname, 'utest1')
 
         # REVOKED
-        r.next()
+        next(r)
         self.assertEqual(task.state, states.REVOKED)
         self.assertTrue(task.revoked)
         self.assertEqual(task.timestamp, task.revoked)
         self.assertEqual(task.worker.hostname, 'utest1')
 
         # RETRY
-        r.next()
+        next(r)
         self.assertEqual(task.state, states.RETRY)
         self.assertTrue(task.retried)
         self.assertEqual(task.timestamp, task.retried)
@@ -226,7 +226,7 @@ class test_State(Case):
         self.assertEqual(task.traceback, 'line 2 at main')
 
         # FAILURE
-        r.next()
+        next(r)
         self.assertEqual(task.state, states.FAILURE)
         self.assertTrue(task.failed)
         self.assertEqual(task.timestamp, task.failed)
@@ -235,7 +235,7 @@ class test_State(Case):
         self.assertEqual(task.traceback, 'line 1 at main')
 
         # SUCCESS
-        r.next()
+        next(r)
         self.assertEqual(task.state, states.SUCCESS)
         self.assertTrue(task.succeeded)
         self.assertEqual(task.timestamp, task.succeeded)

+ 1 - 2
celery/tests/security/test_certificate.py

@@ -1,5 +1,4 @@
 from __future__ import absolute_import
-from __future__ import with_statement
 
 from celery.exceptions import SecurityError
 from celery.security.certificate import Certificate, CertStore, FSCertStore
@@ -26,7 +25,7 @@ class test_Certificate(SecurityCase):
         self.assertRaises(SecurityError, Certificate, KEY1)
 
     def test_has_expired(self):
-        self.assertTrue(Certificate(CERT1).has_expired())
+        self.assertFalse(Certificate(CERT1).has_expired())
 
 
 class test_CertStore(SecurityCase):

+ 0 - 1
celery/tests/security/test_security.py

@@ -13,7 +13,6 @@ Generated with::
 
 """
 from __future__ import absolute_import
-from __future__ import with_statement
 
 import __builtin__
 

Some files were not shown because too many files changed in this diff