Merge branch '3.0'

Conflicts:
	celery/app/abstract.py
	celery/app/amqp.py
	celery/app/base.py
	celery/app/builtins.py
	celery/app/defaults.py
	celery/app/log.py
	celery/app/registry.py
	celery/app/task.py
	celery/app/utils.py
	celery/apps/beat.py
	celery/apps/worker.py
	celery/backends/__init__.py
	celery/backends/amqp.py
	celery/backends/base.py
	celery/backends/database/dfd042c7.py
	celery/backends/mongodb.py
	celery/backends/redis.py
	celery/beat.py
	celery/bin/base.py
	celery/bin/celery.py
	celery/bin/celeryd_detach.py
	celery/bin/multi.py
	celery/bin/worker.py
	celery/canvas.py
	celery/concurrency/base.py
	celery/contrib/migrate.py
	celery/events/__init__.py
	celery/events/cursesmon.py
	celery/events/dumper.py
	celery/events/state.py
	celery/five.py
	celery/loaders/base.py
	celery/local.py
	celery/schedules.py
	celery/security/serialization.py
	celery/task/base.py
	celery/task/http.py
	celery/task/sets.py
	celery/tests/app/test_app.py
	celery/tests/app/test_control.py
	celery/tests/backends/test_cache.py
	celery/tests/backends/test_database.py
	celery/tests/bin/test_amqp.py
	celery/tests/bin/test_beat.py
	celery/tests/bin/test_celery.py
	celery/tests/bin/test_multi.py
	celery/tests/compat.py
	celery/tests/concurrency/test_concurrency.py
	celery/tests/slow/test_buckets.py
	celery/tests/tasks/test_chord.py
	celery/tests/tasks/test_tasks.py
	celery/tests/utilities/test_datastructures.py
	celery/tests/utilities/test_imports.py
	celery/tests/utilities/test_info.py
	celery/tests/utilities/test_pickle.py
	celery/tests/utilities/test_platforms.py
	celery/tests/utilities/test_term.py
	celery/tests/utils.py
	celery/tests/worker/test_control.py
	celery/tests/worker/test_hub.py
	celery/tests/worker/test_worker.py
	celery/utils/__init__.py
	celery/utils/compat.py
	celery/utils/dispatch/saferef.py
	celery/utils/log.py
	celery/utils/mail.py
	celery/utils/timer2.py
	celery/utils/timeutils.py
	celery/worker/__init__.py
	celery/worker/bootsteps.py
	celery/worker/buckets.py
	celery/worker/consumer.py
	celery/worker/control.py
	celery/worker/heartbeat.py
	celery/worker/job.py
	celery/worker/state.py
	pavement.py
Ask Solem, 13 years ago
commit 5bc6e57c83
100 changed files with 1100 additions and 889 deletions
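Most of the hunks below are style-only cleanups: wrapped argument lists are re-indented so continuation lines either line up under the opening parenthesis or use a four-space hanging indent, with long calls broken open and closed by a bracket on its own line, matching PEP 8's continuation-line conventions. A minimal sketch of the pattern; the function and its arguments here are hypothetical, not taken from the diff:

    # Before: continuation lines hang at an arbitrary depth.
    def send_before(name, args=None, kwargs=None,
            retry=None, retry_policy=None):
        return (name, args, kwargs, retry, retry_policy)

    # After: continuation lines align under the opening parenthesis,
    def send_after(name, args=None, kwargs=None,
                   retry=None, retry_policy=None):
        return (name, args, kwargs, retry, retry_policy)

    # ...or a long call is broken open with a four-space hanging indent
    # and a trailing comma before the closing bracket.
    result = send_after(
        'tasks.add', args=(2, 2),
        retry=True, retry_policy={'max_retries': 5},
    )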
  1. celery/__init__.py (+4 -3)
  2. celery/__main__.py (+4 -2)
  3. celery/_state.py (+4 -2)
  4. celery/app/__init__.py (+7 -6)
  5. celery/app/abstract.py (+3 -3)
  6. celery/app/amqp.py (+50 -43)
  7. celery/app/base.py (+59 -50)
  8. celery/app/builtins.py (+23 -18)
  9. celery/app/control.py (+32 -26)
  10. celery/app/defaults.py (+5 -5)
  11. celery/app/log.py (+26 -19)
  12. celery/app/registry.py (+1 -1)
  13. celery/app/routes.py (+2 -2)
  14. celery/app/task.py (+14 -12)
  15. celery/app/utils.py (+4 -3)
  16. celery/apps/beat.py (+5 -4)
  17. celery/apps/worker.py (+13 -9)
  18. celery/backends/__init__.py (+1 -1)
  19. celery/backends/amqp.py (+29 -26)
  20. celery/backends/base.py (+18 -15)
  21. celery/backends/cassandra.py (+8 -7)
  22. celery/backends/database/__init__.py (+20 -16)
  23. celery/backends/database/models.py (+3 -3)
  24. celery/backends/mongodb.py (+4 -8)
  25. celery/backends/redis.py (+1 -1)
  26. celery/backends/rpc.py (+1 -1)
  27. celery/beat.py (+28 -22)
  28. celery/bin/amqp.py (+5 -5)
  29. celery/bin/base.py (+12 -11)
  30. celery/bin/celery.py (+71 -66)
  31. celery/bin/celeryd_detach.py (+6 -5)
  32. celery/bin/events.py (+2 -2)
  33. celery/bin/graph.py (+1 -1)
  34. celery/bin/multi.py (+12 -10)
  35. celery/bin/worker.py (+9 -9)
  36. celery/bootsteps.py (+4 -3)
  37. celery/canvas.py (+39 -25)
  38. celery/concurrency/base.py (+5 -5)
  39. celery/concurrency/eventlet.py (+4 -3)
  40. celery/concurrency/gevent.py (+2 -1)
  41. celery/concurrency/threads.py (+2 -2)
  42. celery/contrib/batches.py (+7 -5)
  43. celery/contrib/bundles.py (+32 -16)
  44. celery/contrib/migrate.py (+12 -12)
  45. celery/contrib/rdb.py (+5 -4)
  46. celery/datastructures.py (+3 -3)
  47. celery/events/__init__.py (+11 -10)
  48. celery/events/cursesmon.py (+48 -34)
  49. celery/events/dumper.py (+21 -18)
  50. celery/events/snapshot.py (+3 -3)
  51. celery/events/state.py (+17 -12)
  52. celery/five.py (+6 -6)
  53. celery/loaders/__init__.py (+1 -1)
  54. celery/loaders/base.py (+26 -24)
  55. celery/loaders/default.py (+1 -1)
  56. celery/local.py (+1 -1)
  57. celery/platforms.py (+2 -2)
  58. celery/result.py (+8 -7)
  59. celery/schedules.py (+37 -40)
  60. celery/security/__init__.py (+1 -1)
  61. celery/security/serialization.py (+16 -13)
  62. celery/task/__init__.py (+2 -1)
  63. celery/task/base.py (+13 -11)
  64. celery/task/http.py (+2 -2)
  65. celery/task/sets.py (+1 -1)
  66. celery/task/trace.py (+2 -2)
  67. celery/tests/__init__.py (+2 -2)
  68. celery/tests/app/test_amqp.py (+2 -2)
  69. celery/tests/app/test_app.py (+18 -15)
  70. celery/tests/app/test_beat.py (+1 -1)
  71. celery/tests/app/test_builtins.py (+3 -2)
  72. celery/tests/app/test_celery.py (+1 -1)
  73. celery/tests/app/test_control.py (+4 -3)
  74. celery/tests/app/test_loaders.py (+16 -13)
  75. celery/tests/app/test_log.py (+7 -5)
  76. celery/tests/app/test_routes.py (+11 -8)
  77. celery/tests/backends/test_amqp.py (+6 -6)
  78. celery/tests/backends/test_base.py (+2 -2)
  79. celery/tests/backends/test_cache.py (+2 -2)
  80. celery/tests/backends/test_cassandra.py (+9 -8)
  81. celery/tests/backends/test_database.py (+6 -5)
  82. celery/tests/backends/test_mongodb.py (+4 -2)
  83. celery/tests/backends/test_redis.py (+4 -2)
  84. celery/tests/bin/test_amqp.py (+8 -4)
  85. celery/tests/bin/test_base.py (+3 -2)
  86. celery/tests/bin/test_beat.py (+3 -2)
  87. celery/tests/bin/test_celery.py (+8 -4)
  88. celery/tests/bin/test_celeryd_detach.py (+5 -4)
  89. celery/tests/bin/test_multi.py (+76 -50)
  90. celery/tests/bin/test_worker.py (+13 -11)
  91. celery/tests/concurrency/test_concurrency.py (+5 -4)
  92. celery/tests/concurrency/test_gevent.py (+2 -2)
  93. celery/tests/concurrency/test_pool.py (+4 -4)
  94. celery/tests/concurrency/test_threads.py (+6 -3)
  95. celery/tests/config.py (+2 -2)
  96. celery/tests/contrib/test_migrate.py (+17 -7)
  97. celery/tests/events/test_events.py (+2 -2)
  98. celery/tests/events/test_state.py (+10 -10)
  99. celery/tests/functional/case.py (+16 -12)
  100. celery/tests/security/test_security.py (+1 -1)

+ 4 - 3
celery/__init__.py

@@ -30,8 +30,8 @@ if os.environ.get('C_IMPDEBUG'):
     from .five import builtins
     real_import = builtins.__import__

-    def debug_import(name, locals=None, globals=None, fromlist=None,
-            level=-1):
+    def debug_import(name, locals=None, globals=None,
+                     fromlist=None, level=-1):
         glob = globals or getattr(sys, 'emarfteg_'[::-1])(1).f_globals
         importer_name = glob and glob.get('__name__') or 'unknown'
         print('-- {0} imports {1}'.format(importer_name, name))
@@ -56,7 +56,8 @@ if STATICA_HACK:
 # Lazy loading
 from .five import recreate_module

-old_module, new_module = recreate_module(__name__,  # pragma: no cover
+old_module, new_module = recreate_module(  # pragma: no cover
+    __name__,
     by_module={
         'celery.app': ['Celery', 'bugreport', 'shared_task'],
         'celery.app.task': ['Task'],

+ 4 - 2
celery/__main__.py

@@ -13,8 +13,10 @@ $ {new_argv}


 def _warn_deprecated(new):
-    print(DEPRECATED_FMT.format(old=basename(sys.argv[0]), new=new,
-            new_argv=' '.join([new] + sys.argv[1:])))
+    print(DEPRECATED_FMT.format(
+        old=basename(sys.argv[0]), new=new,
+        new_argv=' '.join([new] + sys.argv[1:])),
+    )


 def maybe_patch_concurrency():

+ 4 - 2
celery/_state.py

@@ -44,9 +44,11 @@ def get_current_app():
     if default_app is None:
         #: creates the global fallback app instance.
         from celery.app import Celery
-        set_default_app(Celery('default',
+        set_default_app(Celery(
+            'default',
             loader=os.environ.get('CELERY_LOADER') or 'default',
-            set_as_current=False, accept_magic_kwargs=True))
+            set_as_current=False, accept_magic_kwargs=True,
+        ))
     return _tls.current_app or default_app



+ 7 - 6
celery/app/__init__.py

@@ -15,10 +15,10 @@ from collections import Callable
 from celery.local import Proxy
 from celery import _state
 from celery._state import (  # noqa
-        set_default_app,
-        get_current_app as current_app,
-        get_current_task as current_task,
-        _get_active_apps,
+    set_default_app,
+    get_current_app as current_app,
+    get_current_task as current_task,
+    _get_active_apps,
 )
 from celery.utils import gen_task_name

@@ -126,8 +126,9 @@ def shared_task(*args, **kwargs):
             # apps task registry.
             def task_by_cons():
                 app = current_app()
-                return app.tasks[name or gen_task_name(app,
-                            fun.__name__, fun.__module__)]
+                return app.tasks[
+                    name or gen_task_name(app, fun.__name__, fun.__module__)
+                ]
             return Proxy(task_by_cons)
         return __inner


+ 3 - 3
celery/app/abstract.py

@@ -25,8 +25,8 @@ class _configurated(type):

    def __new__(cls, name, bases, attrs):
        attrs['__confopts__'] = dict((attr, spec.get_key(attr))
-                                          for attr, spec in items(attrs)
-                                              if isinstance(spec, from_config))
+                                     for attr, spec in items(attrs)
+                                     if isinstance(spec, from_config))
        inherit_from = attrs.get('inherit_confopts', ())
        for subcls in bases:
            try:
@@ -36,7 +36,7 @@ class _configurated(type):
        for subcls in inherit_from:
            attrs['__confopts__'].update(subcls.__confopts__)
        attrs = dict((k, v if not isinstance(v, from_config) else None)
-                        for k, v in items(attrs))
+                     for k, v in items(attrs))
        return super(_configurated, cls).__new__(cls, name, bases, attrs)



+ 50 - 43
celery/app/amqp.py

@@ -48,7 +48,7 @@ class Queues(dict):
     _consume_from = None

     def __init__(self, queues=None, default_exchange=None,
-            create_missing=True, ha_policy=None):
+                 create_missing=True, ha_policy=None):
         dict.__init__(self)
         self.aliases = WeakValueDictionary()
         self.default_exchange = default_exchange
@@ -120,7 +120,7 @@ class Queues(dict):
         if not active:
             return ''
         info = [QUEUE_FORMAT.strip().format(q)
-                    for _, q in sorted(items(active))]
+                for _, q in sorted(items(active))]
         if indent_first:
             return textindent('\n'.join(info), indent)
         return info[0] + '\n' + textindent('\n'.join(info[1:]), indent)
@@ -167,22 +167,22 @@ class TaskProducer(Producer):
     def __init__(self, channel=None, exchange=None, *args, **kwargs):
         self.retry = kwargs.pop('retry', self.retry)
         self.retry_policy = kwargs.pop('retry_policy',
-                                        self.retry_policy or {})
+                                       self.retry_policy or {})
         exchange = exchange or self.exchange
         self.queues = self.app.amqp.queues  # shortcut
         self.default_queue = self.app.amqp.default_queue
         super(TaskProducer, self).__init__(channel, exchange, *args, **kwargs)

     def publish_task(self, task_name, task_args=None, task_kwargs=None,
-            countdown=None, eta=None, task_id=None, group_id=None,
-            taskset_id=None,  # compat alias to group_id
-            expires=None, exchange=None, exchange_type=None,
-            event_dispatcher=None, retry=None, retry_policy=None,
-            queue=None, now=None, retries=0, chord=None, callbacks=None,
-            errbacks=None, routing_key=None, serializer=None,
-            delivery_mode=None, compression=None, reply_to=None,
-            timeout=None, soft_timeout=None, timeouts=None,
-            declare=None, **kwargs):
+                     countdown=None, eta=None, task_id=None, group_id=None,
+                     taskset_id=None,  # compat alias to group_id
+                     expires=None, exchange=None, exchange_type=None,
+                     event_dispatcher=None, retry=None, retry_policy=None,
+                     queue=None, now=None, retries=0, chord=None,
+                     callbacks=None, errbacks=None, routing_key=None,
+                     serializer=None, delivery_mode=None, compression=None,
+                     reply_to=None, timeout=None, soft_timeout=None,
+                     timeouts=None, declare=None, **kwargs):
         """Send task message."""
         retry = self.retry if retry is None else retry

@@ -201,7 +201,7 @@ class TaskProducer(Producer):
         # merge default and custom policy
         retry = self.retry if retry is None else retry
         _rp = (dict(self.retry_policy, **retry_policy) if retry_policy
-                                                       else self.retry_policy)
+               else self.retry_policy)
         task_id = task_id or uuid()
         task_args = task_args or []
         task_kwargs = task_kwargs or {}
@@ -235,29 +235,33 @@ class TaskProducer(Producer):
             'chord': chord,
         }

-        self.publish(body,
-             exchange=exchange, routing_key=routing_key,
-             serializer=serializer or self.serializer,
-             compression=compression or self.compression,
-             retry=retry, retry_policy=_rp,
-             delivery_mode=delivery_mode, declare=declare,
-             **kwargs)
+        self.publish(
+            body,
+            exchange=exchange, routing_key=routing_key,
+            serializer=serializer or self.serializer,
+            compression=compression or self.compression,
+            retry=retry, retry_policy=_rp,
+            delivery_mode=delivery_mode, declare=declare,
+            **kwargs
+        )

         signals.task_sent.send(sender=task_name, **body)
         if event_dispatcher:
             exname = exchange or self.exchange
             if isinstance(exname, Exchange):
                 exname = exname.name
-            event_dispatcher.send('task-sent', uuid=task_id,
-                                               name=task_name,
-                                               args=safe_repr(task_args),
-                                               kwargs=safe_repr(task_kwargs),
-                                               retries=retries,
-                                               eta=eta,
-                                               expires=expires,
-                                               queue=qname,
-                                               exchange=exname,
-                                               routing_key=routing_key)
+            event_dispatcher.send(
+                'task-sent', uuid=task_id,
+                name=task_name,
+                args=safe_repr(task_args),
+                kwargs=safe_repr(task_kwargs),
+                retries=retries,
+                eta=eta,
+                expires=expires,
+                queue=qname,
+                exchange=exname,
+                routing_key=routing_key,
+            )
         return task_id
     delay_task = publish_task   # XXX Compat

@@ -269,7 +273,7 @@ class TaskPublisher(TaskProducer):
         self.app = app_or_default(kwargs.pop('app', self.app))
         self.retry = kwargs.pop('retry', self.retry)
         self.retry_policy = kwargs.pop('retry_policy',
-                                        self.retry_policy or {})
+                                       self.retry_policy or {})
         exchange = exchange or self.exchange
         if not isinstance(exchange, Exchange):
             exchange = Exchange(exchange,
@@ -283,9 +287,10 @@ class TaskConsumer(Consumer):

     def __init__(self, channel, queues=None, app=None, **kw):
         self.app = app or self.app
-        super(TaskConsumer, self).__init__(channel,
-                queues or list(self.app.amqp.queues.consume_from.values()),
-                **kw)
+        super(TaskConsumer, self).__init__(
+            channel,
+            queues or list(self.app.amqp.queues.consume_from.values()), **kw
+        )


 class AMQP(object):
@@ -344,15 +349,17 @@ class AMQP(object):

         """
         conf = self.app.conf
-        return self.app.subclass_with_self(TaskProducer,
-                reverse='amqp.TaskProducer',
-                exchange=self.default_exchange,
-                routing_key=conf.CELERY_DEFAULT_ROUTING_KEY,
-                serializer=conf.CELERY_TASK_SERIALIZER,
-                compression=conf.CELERY_MESSAGE_COMPRESSION,
-                retry=conf.CELERY_TASK_PUBLISH_RETRY,
-                retry_policy=conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
-                utc=conf.CELERY_ENABLE_UTC)
+        return self.app.subclass_with_self(
+            TaskProducer,
+            reverse='amqp.TaskProducer',
+            exchange=self.default_exchange,
+            routing_key=conf.CELERY_DEFAULT_ROUTING_KEY,
+            serializer=conf.CELERY_TASK_SERIALIZER,
+            compression=conf.CELERY_MESSAGE_COMPRESSION,
+            retry=conf.CELERY_TASK_PUBLISH_RETRY,
+            retry_policy=conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
+            utc=conf.CELERY_ENABLE_UTC,
+        )
     TaskPublisher = TaskProducer  # compat

     @cached_property

+ 59 - 50
celery/app/base.py

@@ -75,10 +75,10 @@ class Celery(object):
     _pool = None

     def __init__(self, main=None, loader=None, backend=None,
-            amqp=None, events=None, log=None, control=None,
-            set_as_current=True, accept_magic_kwargs=False,
-            tasks=None, broker=None, include=None, fixups=None,
-            changes=None, **kwargs):
+                 amqp=None, events=None, log=None, control=None,
+                 set_as_current=True, accept_magic_kwargs=False,
+                 tasks=None, broker=None, include=None, fixups=None,
+                 changes=None, **kwargs):
         self.clock = LamportClock()
         self.main = main
         self.amqp_cls = amqp or self.amqp_cls
@@ -143,12 +143,14 @@ class Celery(object):
         pass

     def start(self, argv=None):
-        return instantiate('celery.bin.celery:CeleryCommand', app=self) \
-                    .execute_from_commandline(argv)
+        return instantiate(
+            'celery.bin.celery:CeleryCommand',
+            app=self).execute_from_commandline(argv)

     def worker_main(self, argv=None):
-        return instantiate('celery.bin.worker:worker', app=self) \
-                    .execute_from_commandline(argv)
+        return instantiate(
+            'celery.bin.worker:worker',
+            app=self).execute_from_commandline(argv)

     def task(self, *args, **opts):
         """Creates new task class from any callable."""
@@ -192,11 +194,11 @@ class Celery(object):
         base = options.pop('base', None) or self.Task

         T = type(fun.__name__, (base, ), dict({
-                'app': self,
-                'accept_magic_kwargs': False,
-                'run': staticmethod(fun),
-                '__doc__': fun.__doc__,
-                '__module__': fun.__module__}, **options))()
+            'app': self,
+            'accept_magic_kwargs': False,
+            'run': staticmethod(fun),
+            '__doc__': fun.__doc__,
+            '__module__': fun.__module__}, **options))()
         task = self._tasks[T.name]  # return global instance.
         task.bind(self)
         return task
@@ -236,9 +238,9 @@ class Celery(object):
         self.loader.autodiscover_tasks(packages, related_name)

     def send_task(self, name, args=None, kwargs=None, countdown=None,
-            eta=None, task_id=None, producer=None, connection=None,
-            result_cls=None, expires=None, queues=None, publisher=None,
-            **options):
+                  eta=None, task_id=None, producer=None, connection=None,
+                  result_cls=None, expires=None, queues=None, publisher=None,
+                  **options):
         producer = producer or publisher  # XXX compat
         if self.conf.CELERY_ALWAYS_EAGER:  # pragma: no cover
             warnings.warn(AlwaysEagerIgnored(
@@ -250,33 +252,36 @@ class Celery(object):
                            self.conf.CELERY_MESSAGE_COMPRESSION)
         options = router.route(options, name, args, kwargs)
         with self.producer_or_acquire(producer) as producer:
-            return result_cls(producer.publish_task(name, args, kwargs,
-                        task_id=task_id,
-                        countdown=countdown, eta=eta,
-                        expires=expires, **options))
+            return result_cls(producer.publish_task(
+                name, args, kwargs,
+                task_id=task_id,
+                countdown=countdown, eta=eta,
+                expires=expires, **options
+            ))

     def connection(self, hostname=None, userid=None, password=None,
-            virtual_host=None, port=None, ssl=None, connect_timeout=None,
-            transport=None, transport_options=None, heartbeat=None, **kwargs):
+                   virtual_host=None, port=None, ssl=None,
+                   connect_timeout=None, transport=None,
+                   transport_options=None, heartbeat=None, **kwargs):
         conf = self.conf
         return self.amqp.Connection(
-                    hostname or conf.BROKER_HOST,
-                    userid or conf.BROKER_USER,
-                    password or conf.BROKER_PASSWORD,
-                    virtual_host or conf.BROKER_VHOST,
-                    port or conf.BROKER_PORT,
-                    transport=transport or conf.BROKER_TRANSPORT,
-                    ssl=self.either('BROKER_USE_SSL', ssl),
-                    connect_timeout=self.either(
-                        'BROKER_CONNECTION_TIMEOUT', connect_timeout),
-                    heartbeat=heartbeat,
-                    transport_options=dict(conf.BROKER_TRANSPORT_OPTIONS,
-                                           **transport_options or {}))
+            hostname or conf.BROKER_HOST,
+            userid or conf.BROKER_USER,
+            password or conf.BROKER_PASSWORD,
+            virtual_host or conf.BROKER_VHOST,
+            port or conf.BROKER_PORT,
+            transport=transport or conf.BROKER_TRANSPORT,
+            ssl=self.either('BROKER_USE_SSL', ssl),
+            connect_timeout=self.either(
+                'BROKER_CONNECTION_TIMEOUT', connect_timeout),
+            heartbeat=heartbeat,
+            transport_options=dict(conf.BROKER_TRANSPORT_OPTIONS,
+                                   **transport_options or {}))
     broker_connection = connection

     @contextmanager
     def connection_or_acquire(self, connection=None, pool=True,
-            *args, **kwargs):
+                              *args, **kwargs):
         if connection:
             yield connection
         else:
@@ -327,15 +332,17 @@ class Celery(object):
     def mail_admins(self, subject, body, fail_silently=False):
         if self.conf.ADMINS:
             to = [admin_email for _, admin_email in self.conf.ADMINS]
-            return self.loader.mail_admins(subject, body, fail_silently, to=to,
-                                       sender=self.conf.SERVER_EMAIL,
-                                       host=self.conf.EMAIL_HOST,
-                                       port=self.conf.EMAIL_PORT,
-                                       user=self.conf.EMAIL_HOST_USER,
-                                       password=self.conf.EMAIL_HOST_PASSWORD,
-                                       timeout=self.conf.EMAIL_TIMEOUT,
-                                       use_ssl=self.conf.EMAIL_USE_SSL,
-                                       use_tls=self.conf.EMAIL_USE_TLS)
+            return self.loader.mail_admins(
+                subject, body, fail_silently, to=to,
+                sender=self.conf.SERVER_EMAIL,
+                host=self.conf.EMAIL_HOST,
+                port=self.conf.EMAIL_PORT,
+                user=self.conf.EMAIL_HOST_USER,
+                password=self.conf.EMAIL_HOST_PASSWORD,
+                timeout=self.conf.EMAIL_TIMEOUT,
+                use_ssl=self.conf.EMAIL_USE_SSL,
+                use_tls=self.conf.EMAIL_USE_TLS,
+            )

     def select_queues(self, queues=None):
         return self.amqp.queues.select_subset(queues)
@@ -351,14 +358,14 @@ class Celery(object):
     def _get_backend(self):
         from celery.backends import get_backend_by_url
         backend, url = get_backend_by_url(
-                self.backend_cls or self.conf.CELERY_RESULT_BACKEND,
-                self.loader)
+            self.backend_cls or self.conf.CELERY_RESULT_BACKEND,
+            self.loader)
         return backend(app=self, url=url)

     def _get_config(self):
         self.configured = True
         s = Settings({}, [self.prepare_config(self.loader.conf),
-                             deepcopy(DEFAULTS)])
+                          deepcopy(DEFAULTS)])

         # load lazy config dict initializers.
         pending = self._pending_defaults
@@ -388,7 +395,7 @@ class Celery(object):
                                        attribute='_app', abstract=True)

     def subclass_with_self(self, Class, name=None, attribute='app',
-            reverse=None, **kw):
+                           reverse=None, **kw):
         """Subclass an app-compatible class by setting its app attribute
         to be this app instance.

@@ -429,8 +436,10 @@ class Celery(object):
         # Reduce only pickles the configuration changes,
         # so the default configuration doesn't have to be passed
         # between processes.
-        return (_unpickle_app, (self.__class__, self.Pickler)
-                              + self.__reduce_args__())
+        return (
+            _unpickle_app,
+            (self.__class__, self.Pickler) + self.__reduce_args__(),
+        )

     def __reduce_keys__(self):
         """Returns keyword arguments used to reconstruct the object

+ 23 - 18
celery/app/builtins.py

@@ -71,7 +71,7 @@ def add_unlock_chord_task(app):
     @app.task(name='celery.chord_unlock', max_retries=None,
               default_retry_delay=1, ignore_result=True, _force_evaluate=True)
     def unlock_chord(group_id, callback, interval=None, propagate=False,
-            max_retries=None, result=None, Result=_res.AsyncResult):
+                     max_retries=None, result=None, Result=_res.AsyncResult):
         if interval is None:
             interval = unlock_chord.default_retry_delay
         result = _res.GroupResult(group_id, [Result(r) for r in result])
@@ -132,10 +132,12 @@ def add_group_task(app):
             result = from_serializable(result)
             # any partial args are added to all tasks in the group
             taskit = (subtask(task).clone(partial_args)
-                        for i, task in enumerate(tasks))
+                      for i, task in enumerate(tasks))
             if self.request.is_eager or app.conf.CELERY_ALWAYS_EAGER:
-                return app.GroupResult(result.id,
-                        [task.apply(group_id=group_id) for task in taskit])
+                return app.GroupResult(
+                    result.id,
+                    [task.apply(group_id=group_id) for task in taskit],
+                )
             with app.producer_or_acquire() as pub:
                 [task.apply_async(group_id=group_id, publisher=pub,
                                   add_to_parent=False) for task in taskit]
@@ -146,8 +148,8 @@ def add_group_task(app):

         def prepare(self, options, tasks, args, **kwargs):
             AsyncResult = self.AsyncResult
-            options['group_id'] = group_id = \
-                    options.setdefault('task_id', uuid())
+            options['group_id'] = group_id = (
+                options.setdefault('task_id', uuid()))

             def prepare_member(task):
                 task = maybe_subtask(task)
@@ -160,8 +162,9 @@ def add_group_task(app):
                 return task, AsyncResult(tid)

             try:
-                tasks, res = list(zip(*[prepare_member(task)
-                                                for task in tasks]))
+                tasks, res = list(zip(
+                    *[prepare_member(task) for task in tasks]
+                ))
             except ValueError:  # tasks empty
                 tasks, res = [], []
             return (tasks, self.app.GroupResult(group_id, res), group_id, args)
@@ -169,16 +172,18 @@ def add_group_task(app):
         def apply_async(self, partial_args=(), kwargs={}, **options):
             if self.app.conf.CELERY_ALWAYS_EAGER:
                 return self.apply(partial_args, kwargs, **options)
-            tasks, result, gid, args = self.prepare(options,
-                                            args=partial_args, **kwargs)
-            super(Group, self).apply_async((list(tasks),
-                result.serializable(), gid, args), **options)
+            tasks, result, gid, args = self.prepare(
+                options, args=partial_args, **kwargs
+            )
+            super(Group, self).apply_async((
+                list(tasks), result.serializable(), gid, args), **options
+            )
             return result

         def apply(self, args=(), kwargs={}, **options):
             return super(Group, self).apply(
-                    self.prepare(options, args=args, **kwargs),
-                    **options).get()
+                self.prepare(options, args=args, **kwargs),
+                **options).get()
     return Group


@@ -228,7 +233,7 @@ def add_chain_task(app):
             return tasks, results

         def apply_async(self, args=(), kwargs={}, group_id=None, chord=None,
-                task_id=None, link=None, link_error=None, **options):
+                        task_id=None, link=None, link_error=None, **options):
             if self.app.conf.CELERY_ALWAYS_EAGER:
                 return self.apply(args, kwargs, **options)
             options.pop('publisher', None)
@@ -289,7 +294,7 @@ def add_chord_task(app):
                 return header.apply(args=partial_args, task_id=group_id)

             results = [AsyncResult(prepare_member(task, body, group_id))
-                            for task in header.tasks]
+                       for task in header.tasks]

             # - fallback implementations schedules the chord_unlock task here
             app.backend.on_chord_apply(group_id, body,
@@ -326,7 +331,7 @@ def add_chord_task(app):
             [body.link_error(s) for s in options.pop('link_error', [])]
             callback_id = body.options.setdefault('task_id', task_id or uuid())
             parent = super(Chord, self).apply_async((header, body, args),
-                                                     kwargs, **options)
+                                                    kwargs, **options)
             body_result = self.AsyncResult(callback_id)
             body_result.parent = parent
             return body_result
@@ -336,5 +341,5 @@ def add_chord_task(app):
             res = super(Chord, self).apply(args, dict(kwargs, eager=True),
                                            **options)
             return maybe_subtask(body).apply(
-                        args=(res.get(propagate=propagate).get(), ))
+                args=(res.get(propagate=propagate).get(), ))
     return Chord

+ 32 - 26
celery/app/control.py

@@ -26,7 +26,7 @@ class Inspect(object):
     app = None

     def __init__(self, destination=None, timeout=1, callback=None,
-            connection=None, app=None, limit=None):
+                 connection=None, app=None, limit=None):
         self.app = app or self.app
         self.destination = destination
         self.timeout = timeout
@@ -44,13 +44,15 @@ class Inspect(object):
         return by_node

     def _request(self, command, **kwargs):
-        return self._prepare(self.app.control.broadcast(command,
-                                      arguments=kwargs,
-                                      destination=self.destination,
-                                      callback=self.callback,
-                                      connection=self.connection,
-                                      limit=self.limit,
-                                      timeout=self.timeout, reply=True))
+        return self._prepare(self.app.control.broadcast(
+            command,
+            arguments=kwargs,
+            destination=self.destination,
+            callback=self.callback,
+            connection=self.connection,
+            limit=self.limit,
+            timeout=self.timeout, reply=True,
+        ))

     def report(self):
         return self._request('report')
@@ -120,7 +122,7 @@ class Control(object):
         })

     def revoke(self, task_id, destination=None, terminate=False,
-            signal='SIGTERM', **kwargs):
+               signal='SIGTERM', **kwargs):
         """Tell all (or specific) workers to revoke a task by id.

         If a task is revoked, the workers will ignore the task and
@@ -170,7 +172,7 @@ class Control(object):
                               **kwargs)

     def add_consumer(self, queue, exchange=None, exchange_type='direct',
-            routing_key=None, options=None, **kwargs):
+                     routing_key=None, options=None, **kwargs):
         """Tell all (or specific) workers to start consuming from a new queue.

         Only the queue name is required as if only the queue is specified
@@ -193,11 +195,13 @@ class Control(object):
         See :meth:`broadcast` for supported keyword arguments.

         """
-        return self.broadcast('add_consumer',
-                arguments=dict({'queue': queue, 'exchange': exchange,
-                                'exchange_type': exchange_type,
-                                'routing_key': routing_key}, **options or {}),
-                **kwargs)
+        return self.broadcast(
+            'add_consumer',
+            arguments=dict({'queue': queue, 'exchange': exchange,
+                            'exchange_type': exchange_type,
+                            'routing_key': routing_key}, **options or {}),
+            **kwargs
+        )

     def cancel_consumer(self, queue, **kwargs):
         """Tell all (or specific) workers to stop consuming from ``queue``.
@@ -205,8 +209,9 @@ class Control(object):
         Supports the same keyword arguments as :meth:`broadcast`.

         """
-        return self.broadcast('cancel_consumer',
-                arguments={'queue': queue}, **kwargs)
+        return self.broadcast(
+            'cancel_consumer', arguments={'queue': queue}, **kwargs
+        )

     def time_limit(self, task_name, soft=None, hard=None, **kwargs):
         """Tell all (or specific) workers to set time limits for
@@ -219,9 +224,10 @@ class Control(object):
         Any additional keyword arguments are passed on to :meth:`broadcast`.

         """
-        return self.broadcast('time_limit',
-                              arguments={'task_name': task_name,
-                                         'hard': hard, 'soft': soft}, **kwargs)
+        return self.broadcast(
+            'time_limit',
+            arguments={'task_name': task_name,
+                       'hard': hard, 'soft': soft}, **kwargs)

     def enable_events(self, destination=None, **kwargs):
         """Tell all (or specific) workers to enable events."""
@@ -248,8 +254,8 @@ class Control(object):
         return self.broadcast('pool_shrink', {}, destination, **kwargs)

     def broadcast(self, command, arguments=None, destination=None,
-            connection=None, reply=False, timeout=1, limit=None,
-            callback=None, channel=None, **extra_kwargs):
+                  connection=None, reply=False, timeout=1, limit=None,
+                  callback=None, channel=None, **extra_kwargs):
         """Broadcast a control command to the celery workers.

         :param command: Name of command to send.
@@ -267,7 +273,7 @@ class Control(object):
         """
         with self.app.connection_or_acquire(connection) as conn:
             arguments = dict(arguments or {}, **extra_kwargs)
-            return self.mailbox(conn)._broadcast(command, arguments,
-                                                 destination, reply, timeout,
-                                                 limit, callback,
-                                                 channel=channel)
+            return self.mailbox(conn)._broadcast(
+                command, arguments, destination, reply, timeout,
+                limit, callback, channel=channel,
+            )

+ 5 - 5
celery/app/defaults.py

@@ -134,10 +134,10 @@ NAMESPACES = {
         'STORE_ERRORS_EVEN_IF_IGNORED': Option(False, type='bool'),
         'TASK_PUBLISH_RETRY': Option(True, type='bool'),
         'TASK_PUBLISH_RETRY_POLICY': Option({
-                'max_retries': 5,
-                'interval_start': 0,
-                'interval_max': 1,
-                'interval_step': 0.2}, type='dict'),
+            'max_retries': 5,
+            'interval_start': 0,
+            'interval_max': 1,
+            'interval_step': 0.2}, type='dict'),
         'TASK_RESULT_EXPIRES': Option(timedelta(days=1), type='float'),
         'TASK_SERIALIZER': Option('pickle'),
         'TIMEZONE': Option(type='string'),
@@ -168,7 +168,7 @@ NAMESPACES = {
         'LOG_LEVEL': Option('WARN', deprecate_by='2.4', remove_by='4.0',
                             alt='--loglevel argument'),
         'LOG_FILE': Option(deprecate_by='2.4', remove_by='4.0',
-                            alt='--logfile argument'),
+                           alt='--logfile argument'),
         'MEDIATOR': Option('celery.worker.mediator:Mediator'),
         'MAX_TASKS_PER_CHILD': Option(type='int'),
         'POOL': Option(DEFAULT_POOL),

+ 26 - 19
celery/app/log.py

@@ -62,7 +62,7 @@ class Logging(object):
         self.colorize = self.app.conf.CELERYD_LOG_COLOR

     def setup(self, loglevel=None, logfile=None, redirect_stdouts=False,
-            redirect_level='WARNING', colorize=None):
+              redirect_level='WARNING', colorize=None):
         handled = self.setup_logging_subsystem(
             loglevel, logfile, colorize=colorize,
         )
@@ -85,7 +85,7 @@ class Logging(object):
         )

     def setup_logging_subsystem(self, loglevel=None, logfile=None,
-            format=None, colorize=None, **kwargs):
+                                format=None, colorize=None, **kwargs):
         if Logging._setup:
             return
         Logging._setup = True
@@ -95,9 +95,10 @@ class Logging(object):
         reset_multiprocessing_logger()
         if not PY3:
             ensure_process_aware_logger()
-        receivers = signals.setup_logging.send(sender=None,
-                        loglevel=loglevel, logfile=logfile,
-                        format=format, colorize=colorize)
+        receivers = signals.setup_logging.send(
+            sender=None, loglevel=loglevel, logfile=logfile,
+            format=format, colorize=colorize,
+        )
         if not receivers:
             root = logging.getLogger()

@@ -110,9 +111,11 @@ class Logging(object):
                                         colorize, **kwargs)
                     if loglevel:
                         logger.setLevel(loglevel)
-                    signals.after_setup_logger.send(sender=None, logger=logger,
-                                            loglevel=loglevel, logfile=logfile,
-                                            format=format, colorize=colorize)
+                    signals.after_setup_logger.send(
+                        sender=None, logger=logger,
+                        loglevel=loglevel, logfile=logfile,
+                        format=format, colorize=colorize,
+                    )
             # then setup the root task logger.
             self.setup_task_loggers(loglevel, logfile, colorize=colorize)

@@ -125,7 +128,7 @@ class Logging(object):
         return receivers

     def setup_task_loggers(self, loglevel=None, logfile=None, format=None,
-            colorize=None, propagate=False, **kwargs):
+                           colorize=None, propagate=False, **kwargs):
         """Setup the task logger.

         If `logfile` is not specified, then `sys.stderr` is used.
@@ -137,19 +140,23 @@ class Logging(object):
         format = format or self.task_format
         colorize = self.supports_color(colorize, logfile)

-        logger = self.setup_handlers(get_logger('celery.task'),
-                                     logfile, format, colorize,
-                                     formatter=TaskFormatter, **kwargs)
+        logger = self.setup_handlers(
+            get_logger('celery.task'),
+            logfile, format, colorize,
+            formatter=TaskFormatter, **kwargs
+        )
         logger.setLevel(loglevel)
         logger.propagate = int(propagate)    # this is an int for some reason.
                                              # better to not question why.
-        signals.after_setup_task_logger.send(sender=None, logger=logger,
-                                     loglevel=loglevel, logfile=logfile,
-                                     format=format, colorize=colorize)
+        signals.after_setup_task_logger.send(
+            sender=None, logger=logger,
+            loglevel=loglevel, logfile=logfile,
+            format=format, colorize=colorize,
+        )
         return logger

     def redirect_stdouts_to_logger(self, logger, loglevel=None,
-            stdout=True, stderr=True):
+                                   stdout=True, stderr=True):
         """Redirect :class:`sys.stdout` and :class:`sys.stderr` to a
         logging instance.

@@ -179,7 +186,7 @@ class Logging(object):
         return colored(enabled=self.supports_color(enabled, logfile))

     def setup_handlers(self, logger, logfile, format, colorize,
-            formatter=ColorFormatter, **kwargs):
+                       formatter=ColorFormatter, **kwargs):
         if self._is_configured(logger):
             return logger
         handler = self._detect_handler(logfile)
@@ -197,11 +204,11 @@ class Logging(object):

     def _has_handler(self, logger):
         return (logger.handlers and
-                    not isinstance(logger.handlers[0], NullHandler))
+                not isinstance(logger.handlers[0], NullHandler))

     def _is_configured(self, logger):
         return self._has_handler(logger) and not getattr(
-                logger, '_rudimentary_setup', False)
+            logger, '_rudimentary_setup', False)

     def setup_logger(self, name='celery', *args, **kwargs):
         """Deprecated: No longer used."""

+ 1 - 1
celery/app/registry.py

@@ -56,7 +56,7 @@ class TaskRegistry(dict):

     def filter_types(self, type):
         return dict((name, task) for name, task in items(self)
-                                if getattr(task, 'type', 'regular') == type)
+                    if getattr(task, 'type', 'regular') == type)


 def _unpickle_task(name):
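
Note: since the registry is a dict subclass, the comprehension above yields
a plain name -> task mapping; usage sketch (`app` is any Celery app):

    regular_tasks = app.tasks.filter_types('regular')
    periodic_tasks = app.tasks.filter_types('periodic')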

+ 2 - 2
celery/app/routes.py

@@ -32,8 +32,8 @@ class MapRoute(object):

 class Router(object):

-    def __init__(self, routes=None, queues=None, create_missing=False,
-            app=None):
+    def __init__(self, routes=None, queues=None,
+                 create_missing=False, app=None):
         self.app = app
         self.queues = {} if queues is None else queues
         self.routes = [] if routes is None else routes

+ 14 - 12
celery/app/task.py

@@ -364,9 +364,9 @@ class Task(object):
         return self.apply_async(args, kwargs)

     def apply_async(self, args=None, kwargs=None,
-            task_id=None, producer=None, connection=None, router=None,
-            link=None, link_error=None, publisher=None, add_to_parent=True,
-            **options):
+                    task_id=None, producer=None, connection=None, router=None,
+                    link=None, link_error=None, publisher=None,
+                    add_to_parent=True, **options):
         """Apply tasks asynchronously by sending a message.

         :keyword args: The positional arguments to pass on to the
@@ -493,7 +493,7 @@ class Task(object):
         return result

     def subtask_from_request(self, request=None, args=None, kwargs=None,
-            **extra_options):
+                             **extra_options):

         request = self.request if request is None else request
         args = request.args if args is None else args
@@ -509,7 +509,7 @@ class Task(object):
         return self.subtask(args, kwargs, options, type=self, **extra_options)

     def retry(self, args=None, kwargs=None, exc=None, throw=True,
-            eta=None, countdown=None, max_retries=None, **options):
+              eta=None, countdown=None, max_retries=None, **options):
         """Retry the task.

         :param args: Positional arguments to retry with.
@@ -568,15 +568,17 @@ class Task(object):
         if not eta and countdown is None:
             countdown = self.default_retry_delay

-        S = self.subtask_from_request(request, args, kwargs,
-            countdown=countdown, eta=eta, retries=retries)
+        S = self.subtask_from_request(
+            request, args, kwargs,
+            countdown=countdown, eta=eta, retries=retries,
+        )

         if max_retries is not None and retries > max_retries:
             if exc:
                 maybe_reraise()
             raise self.MaxRetriesExceededError(
-                    "Can't retry {0}[{1}] args:{2} kwargs:{3}".format(
-                        self.name, request.id, S.args, S.kwargs))
+                "Can't retry {0}[{1}] args:{2} kwargs:{3}".format(
+                    self.name, request.id, S.args, S.kwargs))

         # If task was executed eagerly using apply(),
         # then the retry must also be executed eagerly.
@@ -631,8 +633,8 @@ class Task(object):
                               'delivery_info': {'is_eager': True}}
             supported_keys = fun_takes_kwargs(task.run, default_kwargs)
             extend_with = dict((key, val)
-                                    for key, val in items(default_kwargs)
-                                        if key in supported_keys)
+                               for key, val in items(default_kwargs)
+                               if key in supported_keys)
             kwargs.update(extend_with)

         tb = None
@@ -650,7 +652,7 @@ class Task(object):

         """
         return self._get_app().AsyncResult(task_id, backend=self.backend,
-                                                    task_name=self.name)
+                                           task_name=self.name)

     def subtask(self, *args, **kwargs):
         """Returns :class:`~celery.subtask` object for

+ 4 - 3
celery/app/utils.py

@@ -96,8 +96,9 @@ class Settings(datastructures.ConfigurationView):
     def humanize(self):
         """Returns a human readable string showing changes to the
         configuration."""
-        return '\n'.join('{0}: {1}'.format(key, pretty(value, width=50))
-                        for key, value in items(self.without_defaults()))
+        return '\n'.join(
+            '{0}: {1}'.format(key, pretty(value, width=50))
+            for key, value in items(self.without_defaults()))


 class AppPickler(object):
@@ -116,7 +117,7 @@ class AppPickler(object):
         return self.build_standard_kwargs(*args)

     def build_standard_kwargs(self, main, changes, loader, backend, amqp,
-            events, log, control, accept_magic_kwargs):
+                              events, log, control, accept_magic_kwargs):
         return dict(main=main, loader=loader, backend=backend, amqp=amqp,
                     changes=changes, events=events, log=log, control=control,
                     set_as_current=False,

+ 5 - 4
celery/apps/beat.py

@@ -47,7 +47,7 @@ class Beat(configurated):
     redirect_stdouts_level = from_config()

     def __init__(self, max_interval=None, app=None,
-            socket_timeout=30, pidfile=None, no_color=None, **kwargs):
+                 socket_timeout=30, pidfile=None, no_color=None, **kwargs):
         """Starts the beat task scheduler."""
         self.app = app = app_or_default(app or self.app)
         self.setup_defaults(kwargs, namespace='celerybeat')
@@ -66,7 +66,7 @@ class Beat(configurated):

     def run(self):
         print(str(self.colored.cyan(
-                    'celery beat v{0} is starting.'.format(VERSION_BANNER))))
+            'celery beat v{0} is starting.'.format(VERSION_BANNER))))
         self.init_loader()
         self.set_process_title()
         self.start_scheduler()
@@ -125,8 +125,9 @@ class Beat(configurated):

     def set_process_title(self):
         arg_start = 'manage' in sys.argv[0] and 2 or 1
-        platforms.set_process_title('celery beat',
-                               info=' '.join(sys.argv[arg_start:]))
+        platforms.set_process_title(
+            'celery beat', info=' '.join(sys.argv[arg_start:]),
+        )

     def install_sync_handler(self, beat):
         """Install a `SIGTERM` + `SIGINT` handler that saves

+ 13 - 9
celery/apps/worker.py

@@ -41,7 +41,7 @@ is_pypy = hasattr(sys, 'pypy_version_info')
 def active_thread_count():
     from threading import enumerate
     return sum(1 for t in enumerate()
-        if not t.name.startswith('Dummy-'))
+               if not t.name.startswith('Dummy-'))


 def safe_say(msg):
@@ -144,13 +144,15 @@ class Worker(WorkController):
     def setup_logging(self, colorize=None):
         if colorize is None and self.no_color is not None:
             colorize = not self.no_color
-        return self.app.log.setup(self.loglevel, self.logfile,
-                   redirect_stdouts=False, colorize=colorize)
+        return self.app.log.setup(
+            self.loglevel, self.logfile,
+            redirect_stdouts=False, colorize=colorize,
+        )

     def purge_messages(self):
         count = self.app.control.purge()
         print('purge: Erased {0} {1} from the queue.\n'.format(
-                count, pluralize(count, 'message')))
+            count, pluralize(count, 'message')))

     def tasklist(self, include_builtins=True):
         tasks = self.app.tasks
@@ -229,13 +231,15 @@ class Worker(WorkController):
         os.environ.setdefault('celery_dummy_proxy', 'set_by_celeryd')

     def set_process_status(self, info):
-        return platforms.set_mp_process_title('celeryd',
-                info='{0} ({1})'.format(info, platforms.strargv(sys.argv)),
-                hostname=self.hostname)
+        return platforms.set_mp_process_title(
+            'celeryd',
+            info='{0} ({1})'.format(info, platforms.strargv(sys.argv)),
+            hostname=self.hostname,
+        )


-def _shutdown_handler(worker, sig='TERM', how='Warm', exc=SystemExit,
-        callback=None):
+def _shutdown_handler(worker, sig='TERM', how='Warm',
+                      exc=SystemExit, callback=None):

     def _handle_request(signum, frame):
         with in_sighandler():
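
Note: _shutdown_handler above is a closure factory; the same pattern
sketched with only the stdlib (the real code registers handlers through
celery.platforms):

    import signal

    def install_shutdown_handler(worker, sig='TERM', how='Warm'):
        def _handle_request(signum, frame):
            print('worker: {0} shutdown (MainProcess)'.format(how))
            raise SystemExit()
        signal.signal(getattr(signal, 'SIG' + sig), _handle_request)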

+ 1 - 1
celery/backends/__init__.py

@@ -47,7 +47,7 @@ def get_backend_cls(backend=None, loader=None):
         return symbol_by_name(backend, aliases)
     except ValueError as exc:
         reraise(ValueError, ValueError(UNKNOWN_BACKEND.format(
-                    backend, exc)), sys.exc_info()[2])
+            backend, exc)), sys.exc_info()[2])


 def get_backend_by_url(backend=None, loader=None):

+ 29 - 26
celery/backends/amqp.py

@@ -51,21 +51,21 @@ class AMQPBackend(BaseBackend):
     supports_native_join = True

     retry_policy = {
-            'max_retries': 20,
-            'interval_start': 0,
-            'interval_step': 1,
-            'interval_max': 1,
+        'max_retries': 20,
+        'interval_start': 0,
+        'interval_step': 1,
+        'interval_max': 1,
     }

     def __init__(self, connection=None, exchange=None, exchange_type=None,
-            persistent=None, serializer=None, auto_delete=True,
-            **kwargs):
+                 persistent=None, serializer=None, auto_delete=True,
+                 **kwargs):
         super(AMQPBackend, self).__init__(**kwargs)
         conf = self.app.conf
         self._connection = connection
         self.queue_arguments = {}
         self.persistent = (conf.CELERY_RESULT_PERSISTENT if persistent is None
-                                                         else persistent)
+                           else persistent)
         exchange = exchange or conf.CELERY_RESULT_EXCHANGE
         exchange_type = exchange_type or conf.CELERY_RESULT_EXCHANGE_TYPE
         self.exchange = self._create_exchange(exchange, exchange_type,
@@ -104,8 +104,9 @@ class AMQPBackend(BaseBackend):
         return task_id.replace('-', '')

     def _republish(self, channel, task_id, body, content_type,
-            content_encoding):
-        return Producer(channel).publish(body,
+                   content_encoding):
+        return Producer(channel).publish(
+            body,
             exchange=self.exchange,
             routing_key=self._routing_key(task_id),
             serializer=self.serializer,
@@ -134,7 +135,7 @@ class AMQPBackend(BaseBackend):
         return [self._create_binding(task_id)]

     def wait_for(self, task_id, timeout=None, cache=True, propagate=True,
-            **kwargs):
+                 **kwargs):
         cached_meta = self._cache.get(task_id)
         if cache and cached_meta and \
                 cached_meta['status'] in states.READY_STATES:
@@ -183,8 +184,8 @@ class AMQPBackend(BaseBackend):
                     return {'status': states.PENDING, 'result': None}
     poll = get_task_meta  # XXX compat

-    def drain_events(self, connection, consumer, timeout=None, now=time.time,
-            wait=None):
+    def drain_events(self, connection, consumer,
+                     timeout=None, now=time.time, wait=None):
         wait = wait or connection.drain_events
         results = {}

@@ -244,8 +245,8 @@ class AMQPBackend(BaseBackend):
                         self._cache[task_id] = meta

             bindings = self._many_bindings(task_ids)
-            with self.Consumer(channel, bindings, callbacks=[callback],
-                    no_ack=True):
+            with self.Consumer(channel, bindings,
+                               callbacks=[callback], no_ack=True):
                 wait = conn.drain_events
                 popleft = results.popleft
                 while ids:
@@ -259,31 +260,33 @@ class AMQPBackend(BaseBackend):

     def reload_task_result(self, task_id):
         raise NotImplementedError(
-                'reload_task_result is not supported by this backend.')
+            'reload_task_result is not supported by this backend.')

     def reload_group_result(self, task_id):
         """Reload group result, even if it has been previously fetched."""
         raise NotImplementedError(
-                'reload_group_result is not supported by this backend.')
+            'reload_group_result is not supported by this backend.')

     def save_group(self, group_id, result):
         raise NotImplementedError(
-                'save_group is not supported by this backend.')
+            'save_group is not supported by this backend.')

     def restore_group(self, group_id, cache=True):
         raise NotImplementedError(
-                'restore_group is not supported by this backend.')
+            'restore_group is not supported by this backend.')

     def delete_group(self, group_id):
         raise NotImplementedError(
-                'delete_group is not supported by this backend.')
+            'delete_group is not supported by this backend.')

     def __reduce__(self, args=(), kwargs={}):
-        kwargs.update(connection=self._connection,
-                      exchange=self.exchange.name,
-                      exchange_type=self.exchange.type,
-                      persistent=self.persistent,
-                      serializer=self.serializer,
-                      auto_delete=self.auto_delete,
-                      expires=self.expires)
+        kwargs.update(
+            connection=self._connection,
+            exchange=self.exchange.name,
+            exchange_type=self.exchange.type,
+            persistent=self.persistent,
+            serializer=self.serializer,
+            auto_delete=self.auto_delete,
+            expires=self.expires,
+        )
         return super(AMQPBackend, self).__reduce__(args, kwargs)
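
Note: the retry_policy keys reindented above (max_retries,
interval_start/step/max) are kombu's retry options and can be passed
straight to publish(); sketch, assuming an existing kombu Producer:

    producer.publish(
        body, retry=True,
        retry_policy={'max_retries': 20, 'interval_start': 0,
                      'interval_step': 1, 'interval_max': 1},
    )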

+ 18 - 15
celery/backends/base.py

@@ -29,9 +29,9 @@ from celery.five import items
 from celery.result import from_serializable, GroupResult
 from celery.utils import timeutils
 from celery.utils.serialization import (
-        get_pickled_exception,
-        get_pickleable_exception,
-        create_exception_cls,
+    get_pickled_exception,
+    get_pickleable_exception,
+    create_exception_cls,
 )

 EXCEPTION_ABLE_CODECS = frozenset(['pickle', 'yaml'])
@@ -58,16 +58,18 @@ class BaseBackend(object):
     #: If true the backend must implement :meth:`get_many`.
     supports_native_join = False

-    def __init__(self, app=None, serializer=None, max_cached_results=None,
-            **kwargs):
+    def __init__(self, app=None, serializer=None,
+                 max_cached_results=None, **kwargs):
         from celery.app import app_or_default
         self.app = app_or_default(app)
-        self.serializer = serializer or self.app.conf.CELERY_RESULT_SERIALIZER
+        conf = self.app.conf
+        self.serializer = serializer or conf.CELERY_RESULT_SERIALIZER
         (self.content_type,
          self.content_encoding,
          self.encoder) = serialization.registry._encoders[self.serializer]
-        self._cache = LRUCache(limit=max_cached_results or
-                                      self.app.conf.CELERY_MAX_CACHED_RESULTS)
+        self._cache = LRUCache(
+            limit=max_cached_results or conf.CELERY_MAX_CACHED_RESULTS,
+        )

     def mark_as_started(self, task_id, **meta):
         """Mark a task as started"""
@@ -326,13 +328,13 @@ class KeyValueStoreBackend(BaseBackend):
         if hasattr(values, 'items'):
             # client returns dict so mapping preserved.
             return dict((self._strip_prefix(k), self.decode(v))
-                            for k, v in items(values)
-                                if v is not None)
+                        for k, v in items(values)
+                        if v is not None)
         else:
             # client returns list so need to recreate mapping.
             return dict((bytes_to_str(keys[i]), self.decode(value))
-                            for i, value in enumerate(values)
-                                if value is not None)
+                        for i, value in enumerate(values)
+                        if value is not None)

     def get_many(self, task_ids, timeout=None, interval=0.5):
         ids = set(task_ids)
@@ -352,7 +354,7 @@ class KeyValueStoreBackend(BaseBackend):
         while ids:
             keys = list(ids)
             r = self._mget_to_results(self.mget([self.get_key_for_task(k)
-                                                    for k in keys]), keys)
+                                                 for k in keys]), keys)
             self._cache.update(r)
             ids.difference_update(set(map(bytes_to_str, r)))
             for key, value in items(r):
@@ -431,6 +433,7 @@ class DisabledBackend(BaseBackend):
         pass

     def _is_disabled(self, *args, **kwargs):
-        raise NotImplementedError('No result backend configured.  '
-                'Please see the documentation for more information.')
+        raise NotImplementedError(
+            'No result backend configured.  '
+            'Please see the documentation for more information.')
     wait_for = get_status = get_result = get_traceback = _is_disabled
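
Note: both knobs in the constructor above fall back to the app
configuration; the corresponding settings (values illustrative):

    CELERY_RESULT_SERIALIZER = 'json'
    CELERY_MAX_CACHED_RESULTS = 5000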

+ 8 - 7
celery/backends/cassandra.py

@@ -47,7 +47,7 @@ class CassandraBackend(BaseBackend):
     _retry_wait = 3

     def __init__(self, servers=None, keyspace=None, column_family=None,
-            cassandra_options=None, detailed_mode=False, **kwargs):
+                 cassandra_options=None, detailed_mode=False, **kwargs):
         """Initialize Cassandra backend.

         Raises :class:`celery.exceptions.ImproperlyConfigured` if
@@ -57,7 +57,7 @@ class CassandraBackend(BaseBackend):
         super(CassandraBackend, self).__init__(**kwargs)

         self.expires = kwargs.get('expires') or maybe_timedelta(
-                                    self.app.conf.CELERY_TASK_RESULT_EXPIRES)
+            self.app.conf.CELERY_TASK_RESULT_EXPIRES)

         if not pycassa:
             raise ImproperlyConfigured(
@@ -94,7 +94,7 @@ class CassandraBackend(BaseBackend):

         if not self.servers or not self.keyspace or not self.column_family:
             raise ImproperlyConfigured(
-                    'Cassandra backend not configured.')
+                'Cassandra backend not configured.')

         self._column_family = None

@@ -119,10 +119,11 @@ class CassandraBackend(BaseBackend):
             conn = pycassa.ConnectionPool(self.keyspace,
                                           server_list=self.servers,
                                           **self.cassandra_options)
-            self._column_family = \
-              pycassa.ColumnFamily(conn, self.column_family,
-                    read_consistency_level=self.read_consistency,
-                    write_consistency_level=self.write_consistency)
+            self._column_family = pycassa.ColumnFamily(
+                conn, self.column_family,
+                read_consistency_level=self.read_consistency,
+                write_consistency_level=self.write_consistency,
+            )
         return self._column_family

     def process_cleanup(self):

+ 20 - 16
celery/backends/database/__init__.py

@@ -57,29 +57,33 @@ class DatabaseBackend(BaseBackend):
     subpolling_interval = 0.5

     def __init__(self, dburi=None, expires=None,
-            engine_options=None, **kwargs):
+                 engine_options=None, **kwargs):
         super(DatabaseBackend, self).__init__(**kwargs)
         conf = self.app.conf
         self.expires = maybe_timedelta(self.prepare_expires(expires))
         self.dburi = dburi or conf.CELERY_RESULT_DBURI
-        self.engine_options = dict(engine_options or {},
-                        **conf.CELERY_RESULT_ENGINE_OPTIONS or {})
-        self.short_lived_sessions = kwargs.get('short_lived_sessions',
-                                    conf.CELERY_RESULT_DB_SHORT_LIVED_SESSIONS)
+        self.engine_options = dict(
+            engine_options or {},
+            **conf.CELERY_RESULT_ENGINE_OPTIONS or {})
+        self.short_lived_sessions = kwargs.get(
+            'short_lived_sessions',
+            conf.CELERY_RESULT_DB_SHORT_LIVED_SESSIONS,
+        )
         if not self.dburi:
             raise ImproperlyConfigured(
-                    'Missing connection string! Do you have '
-                    'CELERY_RESULT_DBURI set to a real value?')
+                'Missing connection string! Do you have '
+                'CELERY_RESULT_DBURI set to a real value?')

     def ResultSession(self):
         return ResultSession(
-                    dburi=self.dburi,
-                    short_lived_sessions=self.short_lived_sessions,
-                    **self.engine_options)
+            dburi=self.dburi,
+            short_lived_sessions=self.short_lived_sessions,
+            **self.engine_options
+        )

     @retry
-    def _store_result(self, task_id, result, status, traceback=None,
-            max_retries=3):
+    def _store_result(self, task_id, result, status,
+                      traceback=None, max_retries=3):
         """Store return value and status of an executed task."""
         session = self.ResultSession()
         try:
@@ -129,7 +133,7 @@ class DatabaseBackend(BaseBackend):
         session = self.ResultSession()
         try:
             group = session.query(TaskSet).filter(
-                    TaskSet.taskset_id == group_id).first()
+                TaskSet.taskset_id == group_id).first()
             if group:
                 return group.to_dict()
         finally:
@@ -141,7 +145,7 @@ class DatabaseBackend(BaseBackend):
         session = self.ResultSession()
         try:
             session.query(TaskSet).filter(
-                    TaskSet.taskset_id == group_id).delete()
+                TaskSet.taskset_id == group_id).delete()
             session.flush()
             session.commit()
         finally:
@@ -164,9 +168,9 @@ class DatabaseBackend(BaseBackend):
         now = self.app.now()
         try:
             session.query(Task).filter(
-                    Task.date_done < (now - expires)).delete()
+                Task.date_done < (now - expires)).delete()
             session.query(TaskSet).filter(
-                    TaskSet.date_done < (now - expires)).delete()
+                TaskSet.date_done < (now - expires)).delete()
             session.commit()
         finally:
             session.close()
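
Note: the settings read by the constructor above look like this in a
config module (URI illustrative):

    CELERY_RESULT_BACKEND = 'database'
    CELERY_RESULT_DBURI = 'sqlite:///results.db'
    CELERY_RESULT_ENGINE_OPTIONS = {'echo': False}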

+ 3 - 3
celery/backends/database/models.py

@@ -30,7 +30,7 @@ class Task(ResultModelBase):
     status = sa.Column(sa.String(50), default=states.PENDING)
     result = sa.Column(PickleType, nullable=True)
     date_done = sa.Column(sa.DateTime, default=datetime.utcnow,
-                       onupdate=datetime.utcnow, nullable=True)
+                          onupdate=datetime.utcnow, nullable=True)
     traceback = sa.Column(sa.Text, nullable=True)

     def __init__(self, task_id):
@@ -53,11 +53,11 @@ class TaskSet(ResultModelBase):
     __table_args__ = {'sqlite_autoincrement': True}

     id = sa.Column(sa.Integer, sa.Sequence('taskset_id_sequence'),
-                autoincrement=True, primary_key=True)
+                   autoincrement=True, primary_key=True)
     taskset_id = sa.Column(sa.String(255), unique=True)
     result = sa.Column(sa.PickleType, nullable=True)
     date_done = sa.Column(sa.DateTime, default=datetime.utcnow,
-                       nullable=True)
+                          nullable=True)

     def __init__(self, taskset_id, result):
         self.taskset_id = taskset_id

+ 4 - 8
celery/backends/mongodb.py

@@ -57,7 +57,7 @@ class MongoBackend(BaseBackend):
         """
         super(MongoBackend, self).__init__(*args, **kwargs)
         self.expires = kwargs.get('expires') or maybe_timedelta(
-                                    self.app.conf.CELERY_TASK_RESULT_EXPIRES)
+            self.app.conf.CELERY_TASK_RESULT_EXPIRES)

         if not pymongo:
             raise ImproperlyConfigured(
@@ -74,9 +74,9 @@ class MongoBackend(BaseBackend):
             self.mongodb_port = int(config.get('port', self.mongodb_port))
             self.mongodb_user = config.get('user', self.mongodb_user)
             self.mongodb_password = config.get(
-                    'password', self.mongodb_password)
+                'password', self.mongodb_password)
             self.mongodb_database = config.get(
-                    'database', self.mongodb_database)
+                'database', self.mongodb_database)
             self.mongodb_taskmeta_collection = config.get(
                 'taskmeta_collection', self.mongodb_taskmeta_collection)
             self.mongodb_max_pool_size = config.get(
@@ -183,11 +183,7 @@ class MongoBackend(BaseBackend):
     def cleanup(self):
         """Delete expired metadata."""
         self.collection.remove(
-            {
-                'date_done': {
-                    '$lt': self.app.now() - self.expires,
-                },
-            },
+            {'date_done': {'$lt': self.app.now() - self.expires}},
         )

     def __reduce__(self, args=(), kwargs={}):
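
Note: the config.get() calls above read from the MongoDB settings dict
(values illustrative):

    CELERY_RESULT_BACKEND = 'mongodb'
    CELERY_MONGODB_BACKEND_SETTINGS = {
        'host': 'localhost',
        'port': 27017,
        'database': 'celery',
        'taskmeta_collection': 'celery_taskmeta',
    }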

+ 1 - 1
celery/backends/redis.py

@@ -52,7 +52,7 @@ class RedisBackend(KeyValueStoreBackend):
     implements_incr = True

     def __init__(self, host=None, port=None, db=None, password=None,
-            expires=None, max_connections=None, url=None, **kwargs):
+                 expires=None, max_connections=None, url=None, **kwargs):
         super(RedisBackend, self).__init__(**kwargs)
         conf = self.app.conf
         if self.redis is None:
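
Note: the host/port/db/password parameters above are usually supplied as a
single URL instead (value illustrative):

    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'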

+ 1 - 1
celery/backends/rpc.py

@@ -26,7 +26,7 @@ class RPCBackend(amqp.AMQPBackend):

     def _create_exchange(self, name, type='direct', persistent=False):
         return self.Exchange('c.rep', type=type, delivery_mode=1,
-                durable=False, auto_delete=False)
+                             durable=False, auto_delete=False)

     def on_task_call(self, producer, task_id):
         maybe_declare(self.binding(producer.channel), retry=True)

+ 28 - 22
celery/beat.py

@@ -79,8 +79,8 @@ class ScheduleEntry(object):
     total_run_count = 0

     def __init__(self, name=None, task=None, last_run_at=None,
-            total_run_count=None, schedule=None, args=(), kwargs={},
-            options={}, relative=False):
+                 total_run_count=None, schedule=None, args=(), kwargs={},
+                 options={}, relative=False):
         self.name = name
         self.task = task
         self.args = args
@@ -96,9 +96,11 @@ class ScheduleEntry(object):
     def _next_instance(self, last_run_at=None):
         """Returns a new instance of the same class, but with
         its date and count fields updated."""
-        return self.__class__(**dict(self,
-                                last_run_at=last_run_at or self._default_now(),
-                                total_run_count=self.total_run_count + 1))
+        return self.__class__(**dict(
+            self,
+            last_run_at=last_run_at or self._default_now(),
+            total_run_count=self.total_run_count + 1,
+        ))
     __next__ = next = _next_instance  # for 2to3

     def update(self, other):
@@ -120,8 +122,10 @@ class ScheduleEntry(object):
         return iter(items(vars(self)))

     def __repr__(self):
-        return '<Entry: {0.name} {call} {0.schedule}'.format(self,
-            call=reprcall(self.task, self.args or (), self.kwargs or {}))
+        return '<Entry: {0.name} {call} {0.schedule}'.format(
+            self,
+            call=reprcall(self.task, self.args or (), self.kwargs or {}),
+        )


 class Scheduler(object):
@@ -148,12 +152,12 @@ class Scheduler(object):
     logger = logger  # compat

     def __init__(self, schedule=None, max_interval=None,
-            app=None, Publisher=None, lazy=False, **kwargs):
+                 app=None, Publisher=None, lazy=False, **kwargs):
         app = self.app = app_or_default(app)
         self.data = maybe_promise({} if schedule is None else schedule)
         self.max_interval = (max_interval
-                                or app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
-                                or self.max_interval)
+                             or app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
+                             or self.max_interval)
         self.Publisher = Publisher or app.amqp.TaskProducer
         if not lazy:
             self.setup_schedule()
@@ -163,9 +167,9 @@ class Scheduler(object):
         if self.app.conf.CELERY_TASK_RESULT_EXPIRES:
             if 'celery.backend_cleanup' not in data:
                 entries['celery.backend_cleanup'] = {
-                        'task': 'celery.backend_cleanup',
-                        'schedule': crontab('0', '4', '*'),
-                        'options': {'expires': 12 * 3600}}
+                    'task': 'celery.backend_cleanup',
+                    'schedule': crontab('0', '4', '*'),
+                    'options': {'expires': 12 * 3600}}
         self.update_from_dict(entries)

     def maybe_due(self, entry, publisher=None):
@@ -262,8 +266,8 @@ class Scheduler(object):
         return self.Entry(**dict(entry, name=name))

     def update_from_dict(self, dict_):
-        self.schedule.update(dict((name, self._maybe_entry(name, entry))
-                                for name, entry in items(dict_)))
+        self.schedule.update(dict(
+            (name, self._maybe_entry(name, entry))
+            for name, entry in items(dict_)))

     def merge_inplace(self, b):
         schedule = self.schedule
@@ -288,8 +293,8 @@ class Scheduler(object):
             error('beat: Connection error: %s. '
                   'Trying again in %s seconds...', exc, interval)

-        return self.connection.ensure_connection(_error_handler,
-                    self.app.conf.BROKER_CONNECTION_MAX_RETRIES)
+        return self.connection.ensure_connection(
+            _error_handler, self.app.conf.BROKER_CONNECTION_MAX_RETRIES
+        )

     def get_schedule(self):
         return self.data
@@ -365,8 +371,8 @@ class PersistentScheduler(Scheduler):
         self.install_default_entries(self.schedule)
         self._store.update(__version__=__version__, tz=tz, utc_enabled=utc)
         self.sync()
-        debug('Current schedule:\n' + '\n'.join(repr(entry)
-                                    for entry in values(entries)))
+        debug('Current schedule:\n' + '\n'.join(
+            repr(entry) for entry in values(entries)))

     def get_schedule(self):
         return self._store['entries']
@@ -392,13 +398,13 @@ class Service(object):
     scheduler_cls = PersistentScheduler

     def __init__(self, max_interval=None, schedule_filename=None,
-            scheduler_cls=None, app=None):
+                 scheduler_cls=None, app=None):
         app = self.app = app_or_default(app)
         self.max_interval = (max_interval
                              or app.conf.CELERYBEAT_MAX_LOOP_INTERVAL)
         self.scheduler_cls = scheduler_cls or self.scheduler_cls
-        self.schedule_filename = schedule_filename or \
-                                    app.conf.CELERYBEAT_SCHEDULE_FILENAME
+        self.schedule_filename = (
+            schedule_filename or app.conf.CELERYBEAT_SCHEDULE_FILENAME)

         self._is_shutdown = Event()
         self._is_stopped = Event()
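
Note: install_default_entries() above registers celery.backend_cleanup at
04:00 daily; user entries use the same shape (task name hypothetical):

    from celery.schedules import crontab

    CELERYBEAT_SCHEDULE = {
        'nightly-report': {
            'task': 'reports.generate',
            'schedule': crontab(minute=0, hour=4),
            'args': (),
            'options': {'expires': 12 * 3600},
        },
    }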

+ 5 - 5
celery/bin/amqp.py

@@ -90,8 +90,8 @@ class Spec(object):
             ('pobox', True)

         """
-        return tuple(self.coerce(index, value)
-                for index, value in enumerate(arglist))
+        return tuple(
+            self.coerce(index, value) for index, value in enumerate(arglist))

     def format_response(self, response):
         """Format the return value of this command in a human-friendly way."""
@@ -110,7 +110,7 @@ class Spec(object):

     def format_signature(self):
         return ' '.join(self.format_arg(*padlist(list(arg), 3))
-                            for arg in self.args)
+                        for arg in self.args)


 def dump_message(message):
@@ -256,11 +256,11 @@ class AMQShell(cmd.Cmd):
         """Return all commands starting with `text`, for tab-completion."""
         names = self.get_names()
         first = [cmd for cmd in names
-                        if cmd.startswith(text.replace('_', '.'))]
+                 if cmd.startswith(text.replace('_', '.'))]
         if first:
             return first
         return [cmd for cmd in names
-                    if cmd.partition('.')[2].startswith(text)]
+                if cmd.partition('.')[2].startswith(text)]

     def dispatch(self, cmd, argline):
         """Dispatch and execute the command.

+ 12 - 11
celery/bin/base.py

@@ -145,7 +145,7 @@ class HelpFormatter(IndentedHelpFormatter):

     def format_description(self, description):
         return text.ensure_2lines(text.fill_paragraphs(
-                text.dedent(description), self.width))
+            text.dedent(description), self.width))


 class Command(object):
@@ -211,7 +211,7 @@ class Command(object):
     prog_name = 'celery'

     def __init__(self, app=None, get_app=None, no_color=False,
-            stdout=None, stderr=None, quiet=False):
+                 stdout=None, stderr=None, quiet=False):
         self.app = app
         self.get_app = get_app or self._get_default_app
         self.stdout = stdout or sys.stdout
@@ -298,8 +298,8 @@ class Command(object):
     def prepare_args(self, options, args):
         if options:
             options = dict((k, self.expanduser(v))
-                            for k, v in items(vars(options))
-                                if not k.startswith('_'))
+                           for k, v in items(vars(options))
+                           if not k.startswith('_'))
         args = [self.expanduser(arg) for arg in args]
         self.check_args(args)
         return options, args
@@ -338,8 +338,8 @@ class Command(object):
             epilog=self.epilog,
             formatter=HelpFormatter(),
             description=self.description,
-            option_list=(self.preload_options + self.get_options())),
-        )
+            option_list=(self.preload_options + self.get_options()),
+        ))

     def prepare_parser(self, parser):
         docs = [self.parse_doc(doc) for doc in (self.doc, __doc__) if doc]
@@ -399,7 +399,7 @@ class Command(object):
             except AttributeError:
                 if getattr(sym, '__path__', None):
                     return self.find_app('{0}.celery:'.format(
-                                app.replace(':', '')))
+                                         app.replace(':', '')))
                 raise
         return sym

@@ -448,8 +448,8 @@ class Command(object):
                     in_option = m.groups()[0].strip()
                 assert in_option, 'missing long opt'
             elif in_option and line.startswith(' ' * 4):
-                options[in_option].append(find_rst_ref.sub(r'\1',
-                    line.strip()).replace('`', ''))
+                options[in_option].append(
+                    find_rst_ref.sub(r'\1', line.strip()).replace('`', ''))
         return options

     def with_pool_option(self, argv):
@@ -477,8 +477,8 @@ class Command(object):
         c = self.colored
         if not n:
             return '- empty -'
-        return '\n'.join(str(c.reset(c.white('*'), ' {0}'.format(item)))
-                            for item in n)
+        return '\n'.join(
+            str(c.reset(c.white('*'), ' {0}'.format(item))) for item in n
+        )

     def pretty_dict_ok_error(self, n):
         c = self.colored

+ 71 - 66
celery/bin/celery.py

@@ -121,7 +121,7 @@ class list_(Command):
             raise Error('You must specify one of {0}'.format(available))
             raise Error('You must specify one of {0}'.format(available))
         if what not in topics:
         if what not in topics:
             raise Error('unknown topic {0!r} (choose one of: {1})'.format(
             raise Error('unknown topic {0!r} (choose one of: {1})'.format(
-                            what, available))
+                what, available))
         with self.app.connection() as conn:
         with self.app.connection() as conn:
             self.app.amqp.TaskConsumer(conn).declare()
             self.app.amqp.TaskConsumer(conn).declare()
             topics[what](conn.manager)
             topics[what](conn.manager)
@@ -137,16 +137,16 @@ class call(Command):
     """
     """
     args = '<task_name>'
     args = '<task_name>'
     option_list = Command.option_list + (
     option_list = Command.option_list + (
-            Option('--args', '-a', help='positional arguments (json).'),
-            Option('--kwargs', '-k', help='keyword arguments (json).'),
-            Option('--eta', help='scheduled time (ISO-8601).'),
-            Option('--countdown', type='float',
-                help='eta in seconds from now (float/int).'),
-            Option('--expires', help='expiry time (ISO-8601/float/int).'),
-            Option('--serializer', default='json', help='defaults to json.'),
-            Option('--queue', help='custom queue name.'),
-            Option('--exchange', help='custom exchange name.'),
-            Option('--routing-key', help='custom routing key.'),
+        Option('--args', '-a', help='positional arguments (json).'),
+        Option('--kwargs', '-k', help='keyword arguments (json).'),
+        Option('--eta', help='scheduled time (ISO-8601).'),
+        Option('--countdown', type='float',
+               help='eta in seconds from now (float/int).'),
+        Option('--expires', help='expiry time (ISO-8601/float/int).'),
+        Option('--serializer', default='json', help='defaults to json.'),
+        Option('--queue', help='custom queue name.'),
+        Option('--exchange', help='custom exchange name.'),
+        Option('--routing-key', help='custom routing key.'),
     )
     )
 
 
     def run(self, name, *_, **kw):
     def run(self, name, *_, **kw):
@@ -213,9 +213,9 @@ class result(Command):
     """
     """
     args = '<task_id>'
     args = '<task_id>'
     option_list = Command.option_list + (
     option_list = Command.option_list + (
-            Option('--task', '-t', help='name of task (if custom backend)'),
-            Option('--traceback', action='store_true',
-                   help='show traceback instead'),
+        Option('--task', '-t', help='name of task (if custom backend)'),
+        Option('--traceback', action='store_true',
+               help='show traceback instead'),
     )
     )
 
 
     def run(self, task_id, *args, **kwargs):
     def run(self, task_id, *args, **kwargs):
@@ -238,10 +238,10 @@ class _RemoteControl(Command):
     choices = None
     choices = None
     leaf = False
     leaf = False
     option_list = Command.option_list + (
     option_list = Command.option_list + (
-                Option('--timeout', '-t', type='float',
-                    help='Timeout in seconds (float) waiting for reply'),
-                Option('--destination', '-d',
-                    help='Comma separated list of destination node names.'))
+        Option('--timeout', '-t', type='float',
+               help='Timeout in seconds (float) waiting for reply'),
+        Option('--destination', '-d',
+               help='Comma separated list of destination node names.'))
 
 
     def __init__(self, *args, **kwargs):
     def __init__(self, *args, **kwargs):
         self.show_body = kwargs.pop('show_body', True)
         self.show_body = kwargs.pop('show_body', True)
@@ -249,8 +249,8 @@ class _RemoteControl(Command):
         super(_RemoteControl, self).__init__(*args, **kwargs)
         super(_RemoteControl, self).__init__(*args, **kwargs)
 
 
     @classmethod
     @classmethod
-    def get_command_info(self, command, indent=0, prefix='', color=None,
-            help=False):
+    def get_command_info(self, command,
+                         indent=0, prefix='', color=None, help=False):
         if help:
         if help:
             help = '|' + text.indent(self.choices[command][1], indent + 4)
             help = '|' + text.indent(self.choices[command][1], indent + 4)
         else:
         else:
@@ -259,8 +259,9 @@ class _RemoteControl(Command):
             # see if it uses args.
             # see if it uses args.
             meth = getattr(self, command)
             meth = getattr(self, command)
             return text.join([
             return text.join([
-                '|' + text.indent('{0}{1} {2}'.format(prefix, color(command),
-                                                meth.__doc__), indent), help,
+                '|' + text.indent('{0}{1} {2}'.format(
+                    prefix, color(command), meth.__doc__), indent),
+                help,
             ])
             ])
 
 
         except AttributeError:
         except AttributeError:
@@ -273,7 +274,7 @@ class _RemoteControl(Command):
         color = color if color else lambda x: x
         color = color if color else lambda x: x
         prefix = prefix + ' ' if prefix else ''
         prefix = prefix + ' ' if prefix else ''
         return '\n'.join(self.get_command_info(c, indent, prefix, color, help)
         return '\n'.join(self.get_command_info(c, indent, prefix, color, help)
-                            for c in sorted(self.choices))
+                         for c in sorted(self.choices))

     @property
     def epilog(self):
@@ -284,7 +285,7 @@

     def usage(self, command):
         return '%prog {0} [options] {1} <command> [arg1 .. argN]'.format(
-                command, self.args)
+            command, self.args)

     def call(self, *args, **kwargs):
         raise NotImplementedError('get_obj')
@@ -384,10 +385,10 @@ class control(_RemoteControl):
         'disable_events': (1.0, 'tell worker(s) to disable events'),
         'add_consumer': (1.0, 'tell worker(s) to start consuming a queue'),
         'cancel_consumer': (1.0, 'tell worker(s) to stop consuming a queue'),
-        'rate_limit': (1.0,
-            'tell worker(s) to modify the rate limit for a task type'),
-        'time_limit': (1.0,
-            'tell worker(s) to modify the time limit for a task type.'),
+        'rate_limit': (
+            1.0, 'tell worker(s) to modify the rate limit for a task type'),
+        'time_limit': (
+            1.0, 'tell worker(s) to modify the time limit for a task type.'),
         'autoscale': (1.0, 'change autoscale settings'),
         'pool_grow': (1.0, 'start more pool processes'),
         'pool_shrink': (1.0, 'use less pool processes'),
@@ -417,7 +418,7 @@ class control(_RemoteControl):
         return self.call(method, task_name, soft, hard, reply=True, **kwargs)

     def add_consumer(self, method, queue, exchange=None,
-            exchange_type='direct', routing_key=None, **kwargs):
+                     exchange_type='direct', routing_key=None, **kwargs):
         """<queue> [exchange [type [routing_key]]]"""
         """<queue> [exchange [type [routing_key]]]"""
         return self.call(method, queue, exchange,
         return self.call(method, queue, exchange,
                          exchange_type, routing_key, reply=True, **kwargs)
                          exchange_type, routing_key, reply=True, **kwargs)
@@ -432,10 +433,12 @@ class status(Command):
     option_list = inspect.option_list

     def run(self, *args, **kwargs):
-        I = inspect(app=self.app,
-                    no_color=kwargs.get('no_color', False),
-                    stdout=self.stdout, stderr=self.stderr,
-                    show_reply=False, show_body=False, quiet=True)
+        I = inspect(
+            app=self.app,
+            no_color=kwargs.get('no_color', False),
+            stdout=self.stdout, stderr=self.stderr,
+            show_reply=False, show_body=False, quiet=True,
+        )
         replies = I.run('ping', **kwargs)
         if not replies:
             raise Error('No nodes replied within time constraint',
@@ -459,18 +462,18 @@ class migrate(Command):
     """
     """
     args = '<source_url> <dest_url>'
     args = '<source_url> <dest_url>'
     option_list = Command.option_list + (
     option_list = Command.option_list + (
-            Option('--limit', '-n', type='int',
-                    help='Number of tasks to consume (int)'),
-            Option('--timeout', '-t', type='float', default=1.0,
-                    help='Timeout in seconds (float) waiting for tasks'),
-            Option('--ack-messages', '-a', action='store_true',
-                    help='Ack messages from source broker.'),
-            Option('--tasks', '-T',
-                    help='List of task names to filter on.'),
-            Option('--queues', '-Q',
-                    help='List of queues to migrate.'),
-            Option('--forever', '-F', action='store_true',
-                    help='Continually migrate tasks until killed.'),
+        Option('--limit', '-n', type='int',
+               help='Number of tasks to consume (int)'),
+        Option('--timeout', '-t', type='float', default=1.0,
+               help='Timeout in seconds (float) waiting for tasks'),
+        Option('--ack-messages', '-a', action='store_true',
+               help='Ack messages from source broker.'),
+        Option('--tasks', '-T',
+               help='List of task names to filter on.'),
+        Option('--queues', '-Q',
+               help='List of queues to migrate.'),
+        Option('--forever', '-F', action='store_true',
+               help='Continually migrate tasks until killed.'),
     )
     progress_fmt = MIGRATE_PROGRESS_FMT

@@ -515,20 +518,20 @@ class shell(Command):  # pragma: no cover
         <AsyncResult: 537b48c7-d6d3-427a-a24a-d1b4414035be>
     """
     option_list = Command.option_list + (
-                Option('--ipython', '-I',
-                    action='store_true', dest='force_ipython',
-                    help='force iPython.'),
-                Option('--bpython', '-B',
-                    action='store_true', dest='force_bpython',
-                    help='force bpython.'),
-                Option('--python', '-P',
-                    action='store_true', dest='force_python',
-                    help='force default Python shell.'),
-                Option('--without-tasks', '-T', action='store_true',
-                    help="don't add tasks to locals."),
-                Option('--eventlet', action='store_true',
-                    help='use eventlet.'),
-                Option('--gevent', action='store_true', help='use gevent.'),
+        Option('--ipython', '-I',
+               action='store_true', dest='force_ipython',
+               help='force iPython.'),
+        Option('--bpython', '-B',
+               action='store_true', dest='force_bpython',
+               help='force bpython.'),
+        Option('--python', '-P',
+               action='store_true', dest='force_python',
+               help='force default Python shell.'),
+        Option('--without-tasks', '-T', action='store_true',
+               help="don't add tasks to locals."),
+        Option('--eventlet', action='store_true',
+               help='use eventlet.'),
+        Option('--gevent', action='store_true', help='use gevent.'),
     )

     def run(self, force_ipython=False, force_bpython=False,
@@ -553,9 +556,10 @@ class shell(Command):  # pragma: no cover
                        'subtask': celery.subtask}

         if not without_tasks:
-            self.locals.update(dict((task.__name__, task)
-                                for task in values(self.app.tasks)
-                                    if not task.name.startswith('celery.')))
+            self.locals.update(dict(
+                (task.__name__, task) for task in values(self.app.tasks)
+                if not task.name.startswith('celery.')),
+            )

         if force_python:
             return self.invoke_fallback_shell()
@@ -587,7 +591,7 @@ class shell(Command):  # pragma: no cover
         else:
             import rlcompleter
             readline.set_completer(
-                    rlcompleter.Completer(self.locals).complete)
+                rlcompleter.Completer(self.locals).complete)
             readline.parse_and_bind('tab:complete')
         code.interact(local=self.locals)

@@ -664,8 +668,9 @@ class CeleryCommand(Command):
             cls, argv = self.commands['help'], ['help']
         cls = self.commands.get(command) or self.commands['help']
         try:
-            return cls(app=self.app).run_from_argv(self.prog_name,
-                    argv[1:], command=argv[0])
+            return cls(app=self.app).run_from_argv(
+                self.prog_name, argv[1:], command=argv[0],
+            )
         except Error:
             return self.execute('help', argv)

@@ -723,7 +728,7 @@ class CeleryCommand(Command):
             ret.extend([
                 text.indent('+ {0}: '.format(white(cls)), indent),
                 '\n'.join(self.get_command_info(command, indent + 4, color)
-                            for command in commands),
+                          for command in commands),
                 ''
             ])
         return '\n'.join(ret).strip()
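
The celery.py hunks above are whitespace-only: option tuples and continuation lines are reflowed to the two PEP 8 continuation styles (a four-space hanging indent, or alignment under the opening delimiter). A minimal sketch of the target style, assuming `Option` is optparse's `make_option` as used by celery.bin.base:

    from optparse import make_option as Option

    # Hanging indent for the tuple items; continuation arguments aligned
    # under the opening parenthesis of each Option(...) call.
    option_list = (
        Option('--timeout', '-t', type='float',
               help='Timeout in seconds (float) waiting for reply'),
        Option('--destination', '-d',
               help='Comma separated list of destination node names.'),
    )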

+ 6 - 5
celery/bin/celeryd_detach.py

@@ -26,9 +26,10 @@ from celery.bin.base import daemon_options, Option
 logger = get_logger(__name__)

 OPTION_LIST = daemon_options(default_pidfile='celeryd.pid') + (
-                Option('--fake',
-                       default=False, action='store_true', dest='fake',
-                       help="Don't fork (for debugging purposes)"), )
+    Option('--fake',
+           default=False, action='store_true', dest='fake',
+           help="Don't fork (for debugging purposes)"),
+)


 def detach(path, argv, logfile=None, pidfile=None, uid=None,
@@ -75,7 +76,7 @@ class PartialOptionParser(OptionParser):
                         self.error('{0} requires an argument'.format(opt))
                     else:
                         self.error('{0} requires {1} arguments'.format(
-                                    opt, nargs))
+                            opt, nargs))
                 elif nargs == 1:
                     value = rargs.pop(0)
                 else:
@@ -142,7 +143,7 @@ class detached_celeryd(object):
         options, values, leftovers = self.parse_options(prog_name, argv[1:])
         sys.exit(detach(path=self.execv_path,
                  argv=self.execv_argv + leftovers + config,
-                  **vars(options)))
+                 **vars(options)))


 def main():

+ 2 - 2
celery/bin/events.py

@@ -95,8 +95,8 @@ class events(Command):
         return evtop(app=self.app)

     def run_evcam(self, camera, logfile=None, pidfile=None, uid=None,
-            gid=None, umask=None, working_directory=None,
-            detach=False, **kwargs):
+                  gid=None, umask=None, working_directory=None,
+                  detach=False, **kwargs):
         from celery.events.snapshot import evcam
         workdir = working_directory
         self.set_process_status('cam')

+ 1 - 1
celery/bin/graph.py

@@ -135,7 +135,7 @@ class graph(Command):
             abbr = max and size > max
             if 'enumerate' in args:
                 l = ['{0}{1}'.format(name, subscript(i + 1))
-                        for i, obj in enumerate(l)]
+                     for i, obj in enumerate(l)]
             if abbr:
                 l = l[0:max - 1] + [l[size - 1]]
                 l[max - 2] = '{0}⎨…{1}⎬'.format(

+ 12 - 10
celery/bin/multi.py

@@ -114,7 +114,7 @@ from celery.utils import term, nodesplit
 from celery.utils.text import pluralize

 SIGNAMES = set(sig for sig in dir(signal)
-                        if sig.startswith('SIG') and '_' not in sig)
+               if sig.startswith('SIG') and '_' not in sig)
 SIGMAP = dict((getattr(signal, name), name) for name in SIGNAMES)

 USAGE = """\
@@ -146,7 +146,7 @@ class MultiTool(object):
     retcode = 0  # Final exit code.

     def __init__(self, env=None, fh=None, quiet=False, verbose=False,
-            no_color=False, nosplash=False):
+                 no_color=False, nosplash=False):
         self.fh = fh or sys.stderr
         self.env = env
         self.nosplash = nosplash
@@ -197,8 +197,9 @@ class MultiTool(object):

     def names(self, argv, cmd):
         p = NamespacedOptionParser(argv)
-        self.say('\n'.join(hostname
-                        for hostname, _, _ in multi_args(p, cmd)))
+        self.say('\n'.join(
+            hostname for hostname, _, _ in multi_args(p, cmd)),
+        )

     def get(self, argv, cmd):
         wanted = argv[0]
@@ -211,8 +212,9 @@ class MultiTool(object):
     def show(self, argv, cmd):
         p = NamespacedOptionParser(argv)
         self.note('> Starting nodes...')
-        self.say('\n'.join(' '.join(worker)
-                        for _, worker, _ in multi_args(p, cmd)))
+        self.say('\n'.join(
+            ' '.join(worker) for _, worker, _ in multi_args(p, cmd)),
+        )

     def start(self, argv, cmd):
         self.splash()
@@ -239,7 +241,7 @@ class MultiTool(object):
             if exc.errno != errno.ESRCH:
                 raise
             self.note('Could not signal {0} ({1}): No such process'.format(
-                        nodename, pid))
+                nodename, pid))
             return False
         return True

@@ -253,7 +255,7 @@ class MultiTool(object):
         return True

     def shutdown_nodes(self, nodes, sig=signal.SIGTERM, retry=None,
-            callback=None):
+                       callback=None):
         if not nodes:
             return
         P = set(nodes)
@@ -438,7 +440,7 @@ def multi_args(p, cmd='celery worker', append='', prefix='', suffix=''):
     cmd = options.pop('--cmd', cmd)
     append = options.pop('--append', append)
     hostname = options.pop('--hostname',
-                   options.pop('-n', socket.gethostname()))
+                           options.pop('-n', socket.gethostname()))
     prefix = options.pop('--prefix', prefix) or ''
     suffix = options.pop('--suffix', suffix) or hostname
     if suffix in ('""', "''"):
@@ -465,7 +467,7 @@ def multi_args(p, cmd='celery worker', append='', prefix='', suffix=''):
                                 '%d': this_suffix})
         argv = ([expand(cmd)] +
                 [format_opt(opt, expand(value))
-                        for opt, value in items(p.optmerge(name, options))] +
+                 for opt, value in items(p.optmerge(name, options))] +
                 [passthrough])
         if append:
             argv.append(expand(append))
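
For context, the `names` command reflowed above only expands and prints node names, without starting anything; a short sketch using the MultiTool methods shown in these hunks (exact output format is an assumption):

    from celery.bin.multi import MultiTool

    # Expands the range '3' into three node names (one per line);
    # `show` would print the fully expanded worker commands instead.
    MultiTool().names(['3'], 'celery worker')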

+ 9 - 9
celery/bin/worker.py

@@ -172,8 +172,8 @@ class worker(Command):
                 loglevel = mlevel(loglevel)
             except KeyError:  # pragma: no cover
                 self.die('Unknown level {0!r}. Please use one of {1}.'.format(
-                    loglevel, '|'.join(l for l in LOG_LEVELS
-                      if isinstance(l, string_t))))
+                    loglevel, '|'.join(
+                        l for l in LOG_LEVELS if isinstance(l, string_t))))

         return self.app.Worker(
             hostname=hostname, pool_cls=pool_cls, loglevel=loglevel, **kwargs
@@ -188,25 +188,25 @@ class worker(Command):
         conf = self.app.conf
         return (
             Option('-c', '--concurrency',
-                default=conf.CELERYD_CONCURRENCY, type='int'),
+                   default=conf.CELERYD_CONCURRENCY, type='int'),
             Option('-P', '--pool', default=conf.CELERYD_POOL, dest='pool_cls'),
             Option('--purge', '--discard', default=False, action='store_true'),
             Option('-l', '--loglevel', default=conf.CELERYD_LOG_LEVEL),
             Option('-n', '--hostname'),
             Option('-B', '--beat', action='store_true'),
             Option('-s', '--schedule', dest='schedule_filename',
-                default=conf.CELERYBEAT_SCHEDULE_FILENAME),
+                   default=conf.CELERYBEAT_SCHEDULE_FILENAME),
             Option('--scheduler', dest='scheduler_cls'),
             Option('-S', '--statedb',
-                default=conf.CELERYD_STATE_DB, dest='state_db'),
+                   default=conf.CELERYD_STATE_DB, dest='state_db'),
             Option('-E', '--events', default=conf.CELERY_SEND_EVENTS,
-                action='store_true', dest='send_events'),
+                   action='store_true', dest='send_events'),
             Option('--time-limit', type='float', dest='task_time_limit',
-                default=conf.CELERYD_TASK_TIME_LIMIT),
+                   default=conf.CELERYD_TASK_TIME_LIMIT),
             Option('--soft-time-limit', dest='task_soft_time_limit',
-                default=conf.CELERYD_TASK_SOFT_TIME_LIMIT, type='float'),
+                   default=conf.CELERYD_TASK_SOFT_TIME_LIMIT, type='float'),
             Option('--maxtasksperchild', dest='max_tasks_per_child',
-                default=conf.CELERYD_MAX_TASKS_PER_CHILD, type='int'),
+                   default=conf.CELERYD_MAX_TASKS_PER_CHILD, type='int'),
             Option('--queues', '-Q', default=[]),
             Option('--include', '-I', default=[]),
             Option('--autoscale'),
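
These worker options take their defaults from the app configuration, so the same settings can be supplied without command-line flags; a minimal sketch (the broker URL is a placeholder):

    from celery import Celery

    app = Celery('proj', broker='amqp://')
    # With the Option defaults above, these become the defaults for
    # 'celery worker -c' and '--time-limit' when no flag is passed.
    app.conf.CELERYD_CONCURRENCY = 4
    app.conf.CELERYD_TASK_TIME_LIMIT = 300.0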

+ 4 - 3
celery/bootsteps.py

@@ -58,7 +58,8 @@ class StepFormatter(GraphFormatter):
     }

     def label(self, step):
-        return step and '{0}{1}'.format(self._get_prefix(step),
+        return step and '{0}{1}'.format(
+            self._get_prefix(step),
             (step.label or _label(step)).encode('utf-8', 'ignore'),
         )

@@ -97,8 +98,8 @@ class Namespace(object):
     started = 0
     default_steps = set()

-    def __init__(self, steps=None, name=None, app=None, on_start=None,
-            on_close=None, on_stopped=None):
+    def __init__(self, steps=None, name=None, app=None,
+                 on_start=None, on_close=None, on_stopped=None):
         self.app = app
         self.name = name or self.name or qualname(type(self))
         self.types = set(steps or []) | set(self.default_steps)
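
`Namespace` collects bootstep types and drives their start/close/stop callbacks; a sketch of attaching a custom step to the worker namespace, assuming the 3.1-style `bootsteps.Step` and `app.steps` API (the step name and broker URL are placeholders):

    from celery import Celery, bootsteps

    class InfoStep(bootsteps.Step):
        """Hypothetical step, built when the worker namespace starts."""

        def __init__(self, parent, **kwargs):
            print('building step for {0!r}'.format(parent))

    app = Celery('proj', broker='amqp://')
    app.steps['worker'].add(InfoStep)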

+ 39 - 25
celery/canvas.py

@@ -81,7 +81,7 @@ class Signature(dict):
         return Signature(d)

     def __init__(self, task=None, args=None, kwargs=None, options=None,
-                type=None, subtask_type=None, immutable=False, **ex):
+                 type=None, subtask_type=None, immutable=False, **ex):
         init = dict.__init__

         if isinstance(task, dict):
@@ -95,11 +95,12 @@ class Signature(dict):
         else:
             self._type = task

-        init(self, task=task_name, args=tuple(args or ()),
-                                   kwargs=kwargs or {},
-                                   options=dict(options or {}, **ex),
-                                   subtask_type=subtask_type,
-                                   immutable=immutable)
+        init(self,
+             task=task_name, args=tuple(args or ()),
+             kwargs=kwargs or {},
+             options=dict(options or {}, **ex),
+             subtask_type=subtask_type,
+             immutable=immutable)

     def __call__(self, *partial_args, **partial_kwargs):
         return self.apply_async(partial_args, partial_kwargs)
@@ -171,9 +172,11 @@ class Signature(dict):
         return self.append_to_list_option('link_error', errback)

     def flatten_links(self):
-        return list(_chain.from_iterable(_chain([[self]],
-                (link.flatten_links()
-                    for link in maybe_list(self.options.get('link')) or []))))
+        return list(_chain.from_iterable(_chain(
+            [[self]],
+            (link.flatten_links()
+                for link in maybe_list(self.options.get('link')) or [])
+        )))

     def __or__(self, other):
         if not isinstance(self, chain) and isinstance(other, chain):
@@ -245,8 +248,9 @@ class chain(Signature):

     def __init__(self, *tasks, **options):
         tasks = tasks[0] if len(tasks) == 1 and is_list(tasks[0]) else tasks
-        Signature.__init__(self,
-            'celery.chain', (), {'tasks': tasks}, **options)
+        Signature.__init__(
+            self, 'celery.chain', (), {'tasks': tasks}, **options
+        )
         self.tasks = tasks
         self.subtask_type = 'chain'

@@ -271,14 +275,17 @@ class _basemap(Signature):
     _unpack_args = itemgetter('task', 'it')

     def __init__(self, task, it, **options):
-        Signature.__init__(self, self._task_name, (),
-                {'task': task, 'it': regen(it)}, immutable=True, **options)
+        Signature.__init__(
+            self, self._task_name, (),
+            {'task': task, 'it': regen(it)}, immutable=True, **options
+        )

     def apply_async(self, args=(), kwargs={}, **opts):
         # need to evaluate generators
         task, it = self._unpack_args(self.kwargs)
-        return self.type.apply_async((),
-                {'task': task, 'it': list(it)}, **opts)
+        return self.type.apply_async(
+            (), {'task': task, 'it': list(it)}, **opts
+        )

     @classmethod
     def from_dict(self, d):
@@ -309,9 +316,11 @@ class chunks(Signature):
     _unpack_args = itemgetter('task', 'it', 'n')

     def __init__(self, task, it, n, **options):
-        Signature.__init__(self, 'celery.chunks', (),
-                {'task': task, 'it': regen(it), 'n': n},
-                immutable=True, **options)
+        Signature.__init__(
+            self, 'celery.chunks', (),
+            {'task': task, 'it': regen(it), 'n': n},
+            immutable=True, **options
+        )

     @classmethod
     def from_dict(self, d):
@@ -347,8 +356,9 @@ class group(Signature):
     def __init__(self, *tasks, **options):
         if len(tasks) == 1:
             tasks = _maybe_group(tasks[0])
-        Signature.__init__(self,
-            'celery.group', (), {'tasks': tasks}, **options)
+        Signature.__init__(
+            self, 'celery.group', (), {'tasks': tasks}, **options
+        )
         self.tasks, self.subtask_type = tasks, 'group'

     @classmethod
@@ -361,8 +371,9 @@ class group(Signature):
         return group(tasks, **kwdict(d['options']))

     def __call__(self, *partial_args, **options):
-        tasks, result, gid, args = self.type.prepare(options,
-                    [Signature.clone(t) for t in self.tasks], partial_args)
+        tasks, result, gid, args = self.type.prepare(
+            options, [Signature.clone(t) for t in self.tasks], partial_args,
+        )
         return self.type(tasks, result, gid, args)

     def _freeze(self, _id=None):
@@ -397,9 +408,12 @@ class chord(Signature):
     Chord = Chord

     def __init__(self, header, body=None, task='celery.chord',
-            args=(), kwargs={}, **options):
-        Signature.__init__(self, task, args, dict(kwargs,
-            header=_maybe_group(header), body=maybe_subtask(body)), **options)
+                 args=(), kwargs={}, **options):
+        Signature.__init__(
+            self, task, args,
+            dict(kwargs, header=_maybe_group(header),
+                 body=maybe_subtask(body)), **options
+        )
         self.subtask_type = 'chord'

     @classmethod
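
The canvas classes reformatted above are the building blocks of workflows; a minimal sketch of combining them (the tasks and broker/backend URLs are placeholders):

    from celery import Celery, chain, chord, group

    app = Celery('proj', broker='amqp://', backend='amqp://')

    @app.task
    def add(x, y):
        return x + y

    @app.task
    def tsum(numbers):
        return sum(numbers)

    # chain: add(2, 2), then add(result, 4); chord: a group run in
    # parallel whose joined result is fed to the body task.
    pipeline = chain(add.s(2, 2), add.s(4))
    callback = chord(group(add.s(i, i) for i in range(10)), tsum.s())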

+ 5 - 5
celery/concurrency/base.py

@@ -21,7 +21,7 @@ logger = get_logger('celery.pool')


 def apply_target(target, args=(), kwargs={}, callback=None,
-        accept_callback=None, pid=None, **_):
+                 accept_callback=None, pid=None, **_):
     if accept_callback:
         accept_callback(pid or os.getpid(), time.time())
     callback(target(*args, **kwargs))
@@ -56,8 +56,8 @@ class BasePool(object):
     #: only used by multiprocessing pool
     uses_semaphore = False

-    def __init__(self, limit=None, putlocks=True, forking_enable=True,
-            **options):
+    def __init__(self, limit=None, putlocks=True,
+                 forking_enable=True, **options):
         self.limit = limit
         self.putlocks = putlocks
         self.options = options
@@ -93,11 +93,11 @@

     def terminate_job(self, pid):
         raise NotImplementedError(
-                '{0} does not implement kill_job'.format(type(self)))
+            '{0} does not implement kill_job'.format(type(self)))

     def restart(self):
         raise NotImplementedError(
-                '{0} does not implement restart'.format(type(self)))
+            '{0} does not implement restart'.format(type(self)))

     def stop(self):
         self.on_stop()
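
`apply_target` (reflowed at the top of this file) is the synchronous core every pool builds on: it fires `accept_callback`, runs the target, and passes the return value to `callback`. A runnable sketch with placeholder callbacks:

    from __future__ import print_function

    from celery.concurrency.base import apply_target

    def mul(x, y):
        return x * y

    # Prints the accepting pid, then 'result: 42'.
    apply_target(mul, args=(6, 7),
                 callback=lambda retval: print('result:', retval),
                 accept_callback=lambda pid, t: print('accepted by', pid))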

+ 4 - 3
celery/concurrency/eventlet.py

@@ -144,9 +144,10 @@ class TaskPool(base.BasePool):
         signals.eventlet_pool_postshutdown.send(sender=self)

     def on_apply(self, target, args=None, kwargs=None, callback=None,
-            accept_callback=None, **_):
-        self._quick_apply_sig(sender=self,
-                target=target, args=args, kwargs=kwargs)
+                 accept_callback=None, **_):
+        self._quick_apply_sig(
+            sender=self, target=target, args=args, kwargs=kwargs,
+        )
         self._quick_put(apply_target, target, args, kwargs,
                         callback, accept_callback,
                         self.getpid)

+ 2 - 1
celery/concurrency/gevent.py

@@ -127,7 +127,8 @@ class TaskPool(BasePool):
             self._pool.join()

     def on_apply(self, target, args=None, kwargs=None, callback=None,
-            accept_callback=None, timeout=None, timeout_callback=None, **_):
+                 accept_callback=None, timeout=None,
+                 timeout_callback=None, **_):
         timeout = self.timeout if timeout is None else timeout
         return self._quick_put(apply_timeout if timeout else apply_target,
                                target, args, kwargs, callback, accept_callback,

+ 2 - 2
celery/concurrency/threads.py

@@ -26,7 +26,7 @@ class TaskPool(BasePool):
             import threadpool
         except ImportError:
             raise ImportError(
-                    'The threaded pool requires the threadpool module.')
+                'The threaded pool requires the threadpool module.')
         self.WorkRequest = threadpool.WorkRequest
         self.ThreadPool = threadpool.ThreadPool
         super(TaskPool, self).__init__(*args, **kwargs)
@@ -43,7 +43,7 @@ class TaskPool(BasePool):
         self._pool.dismissWorkers(self.limit, do_join=True)

     def on_apply(self, target, args=None, kwargs=None, callback=None,
-            accept_callback=None, **_):
+                 accept_callback=None, **_):
         req = self.WorkRequest(apply_target, (target, args, kwargs, callback,
                                               accept_callback))
         self._quick_put(req)

+ 7 - 5
celery/contrib/batches.py

@@ -211,7 +211,7 @@ class Batches(Task):

     def flush(self, requests):
         return self.apply_buffer(requests, ([SimpleRequest.from_request(r)
-                                                for r in requests], ))
+                                             for r in requests], ))

     def _do_flush(self):
         logger.debug('Batches: Wake-up to flush buffer...')
@@ -237,7 +237,9 @@ class Batches(Task):
         def on_return(result):
             [req.acknowledge() for req in acks_late[True]]

-        return self._pool.apply_async(apply_batches_task,
-                    (self, args, 0, None),
-                    accept_callback=on_accepted,
-                    callback=acks_late[True] and on_return or noop)
+        return self._pool.apply_async(
+            apply_batches_task,
+            (self, args, 0, None),
+            accept_callback=on_accepted,
+            callback=acks_late[True] and on_return or noop,
+        )
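
The flush path above hands the buffered requests to the task as a single list; usage follows the documented Batches pattern, sketched here (the broker URL and 'url' keyword are placeholders):

    from collections import Counter

    from celery import Celery
    from celery.contrib.batches import Batches

    app = Celery('proj', broker='amqp://')

    @app.task(base=Batches, flush_every=100, flush_interval=10)
    def count_clicks(requests):
        # Invoked with up to 100 buffered SimpleRequest objects, or
        # whatever arrived within the 10 second flush interval.
        count = Counter(request.kwargs['url'] for request in requests)
        for url, n in count.items():
            print('{0}: {1} clicks'.format(url, n))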

+ 32 - 16
celery/contrib/bundles.py

@@ -21,29 +21,45 @@ django_celery = Dist('django-celery', VERSION, **defaults)
 flask_celery = Dist('Flask-Celery', VERSION, **defaults)

 bundles = [
-    celery.Bundle('celery-with-redis',
+    celery.Bundle(
+        'celery-with-redis',
         'Bundle installing the dependencies for Celery and Redis',
-        requires=['redis>=2.4.4']),
-    celery.Bundle('celery-with-mongodb',
+        requires=['redis>=2.4.4'],
+    ),
+    celery.Bundle(
+        'celery-with-mongodb',
         'Bundle installing the dependencies for Celery and MongoDB',
-        requires=['pymongo']),
-    celery.Bundle('celery-with-couchdb',
+        requires=['pymongo'],
+    ),
+    celery.Bundle(
+        'celery-with-couchdb',
         'Bundle installing the dependencies for Celery and CouchDB',
-        requires=['couchdb']),
-    celery.Bundle('celery-with-beanstalk',
+        requires=['couchdb'],
+    ),
+    celery.Bundle(
+        'celery-with-beanstalk',
         'Bundle installing the dependencies for Celery and Beanstalk',
-        requires=['beanstalkc']),
+        requires=['beanstalkc'],
+    ),

-    django_celery.Bundle('django-celery-with-redis',
+    django_celery.Bundle(
+        'django-celery-with-redis',
         'Bundle installing the dependencies for Django-Celery and Redis',
-        requires=['redis>=2.4.4']),
-    django_celery.Bundle('django-celery-with-mongodb',
+        requires=['redis>=2.4.4'],
+    ),
+    django_celery.Bundle(
+        'django-celery-with-mongodb',
         'Bundle installing the dependencies for Django-Celery and MongoDB',
-        requires=['pymongo']),
-    django_celery.Bundle('django-celery-with-couchdb',
+        requires=['pymongo'],
+    ),
+    django_celery.Bundle(
+        'django-celery-with-couchdb',
         'Bundle installing the dependencies for Django-Celery and CouchDB',
-        requires=['couchdb']),
-    django_celery.Bundle('django-celery-with-beanstalk',
+        requires=['couchdb'],
+    ),
+    django_celery.Bundle(
+        'django-celery-with-beanstalk',
         'Bundle installing the dependencies for Django-Celery and Beanstalk',
-        requires=['beanstalkc']),
+        requires=['beanstalkc'],
+    ),
 ]

+ 12 - 12
celery/contrib/migrate.py

@@ -51,10 +51,10 @@ class State(object):


 def republish(producer, message, exchange=None, routing_key=None,
-        remove_props=['application_headers',
-                      'content_type',
-                      'content_encoding',
-                      'headers']):
+              remove_props=['application_headers',
+                            'content_type',
+                            'content_encoding',
+                            'headers']):
     body = ensure_bytes(message.body)  # use raw message body.
     info, headers, props = (message.delivery_info,
                             message.headers, message.properties)
@@ -93,7 +93,7 @@ def filter_callback(callback, tasks):


 def migrate_tasks(source, dest, migrate=migrate_task, app=None,
-        queues=None, **kwargs):
+                  queues=None, **kwargs):
     app = app_or_default(app)
     queues = prepare_queues(queues)
     producer = app.amqp.TaskProducer(dest)
@@ -120,8 +120,8 @@ def _maybe_queue(app, q):


 def move(predicate, connection=None, exchange=None, routing_key=None,
-        source=None, app=None, callback=None, limit=None, transform=None,
-        **kwargs):
+         source=None, app=None, callback=None, limit=None, transform=None,
+         **kwargs):
     """Find tasks by filtering them and move the tasks to a new queue.
     """Find tasks by filtering them and move the tasks to a new queue.
 
 
     :param predicate: Filter function used to decide which messages
     :param predicate: Filter function used to decide which messages
@@ -197,7 +197,7 @@ def move(predicate, connection=None, exchange=None, routing_key=None,
                 else:
                     ex, rk = expand_dest(ret, exchange, routing_key)
                 republish(producer, message,
-                        exchange=ex, routing_key=rk)
+                          exchange=ex, routing_key=rk)
                 message.ack()

                 state.filtered += 1
@@ -230,16 +230,16 @@ def prepare_queues(queues):
         queues = queues.split(',')
     if isinstance(queues, list):
         queues = dict(tuple(islice(cycle(q.split(':')), None, 2))
-                        for q in queues)
+                      for q in queues)
     if queues is None:
         queues = {}
     return queues


 def start_filter(app, conn, filter, limit=None, timeout=1.0,
-        ack_messages=False, tasks=None, queues=None,
-        callback=None, forever=False, on_declare_queue=None,
-        consume_from=None, state=None, **kwargs):
+                 ack_messages=False, tasks=None, queues=None,
+                 callback=None, forever=False, on_declare_queue=None,
+                 consume_from=None, state=None, **kwargs):
     state = state or State()
     queues = prepare_queues(queues)
     if isinstance(tasks, string_t):
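
`move` (whose signature is reflowed above) republishes any message the predicate matches; per the `expand_dest` logic, the predicate may return a queue name to pick the destination per message. A hedged sketch with a hypothetical predicate:

    from celery.contrib.migrate import move

    def is_wanted_task(body, message):
        # Returning a queue name moves the message there;
        # returning None leaves it where it is.
        if body['task'] == 'tasks.add':
            return 'new-queue'

    # Stop after inspecting 100 messages, waiting at most 2s for more.
    move(is_wanted_task, limit=100, timeout=2.0)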

+ 5 - 4
celery/contrib/rdb.py

@@ -82,14 +82,15 @@ class Rdb(Pdb):
     _sock = None

     def __init__(self, host=CELERY_RDB_HOST, port=CELERY_RDB_PORT,
-            port_search_limit=100, port_skew=+0, out=sys.stdout):
+                 port_search_limit=100, port_skew=+0, out=sys.stdout):
         self.active = True
         self.out = out

         self._prev_handles = sys.stdin, sys.stdout

-        self._sock, this_port = self.get_avail_port(host, port,
-            port_search_limit, port_skew)
+        self._sock, this_port = self.get_avail_port(
+            host, port, port_search_limit, port_skew,
+        )
         self._sock.listen(1)
         self.ident = '{0}:{1}'.format(self.me, this_port)
         self.host = host
@@ -101,7 +102,7 @@ class Rdb(Pdb):
         self.say(SESSION_STARTED.format(self=self))
         self._handle = sys.stdin = sys.stdout = self._client.makefile('rw')
         Pdb.__init__(self, completekey='tab',
-                           stdin=self._handle, stdout=self._handle)
+                     stdin=self._handle, stdout=self._handle)

     def get_avail_port(self, host, port, search_limit=100, skew=+0):
         try:
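
`Rdb` binds a socket (via `get_avail_port` above) and redirects Pdb's stdin/stdout over it; the usual entry point is `rdb.set_trace()` inside a task, reachable by telnet on the advertised port (base port 6899 by default). A sketch with a placeholder task:

    from celery import Celery
    from celery.contrib import rdb

    app = Celery('proj', broker='amqp://')

    @app.task
    def add(x, y):
        result = x + y
        rdb.set_trace()  # the worker log prints the host:port to telnet into
        return result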

+ 3 - 3
celery/datastructures.py

@@ -60,7 +60,7 @@ class GraphFormatter(object):
     graph_scheme = {'bgcolor': 'mintcream'}

     def __init__(self, root=None, type=None, id=None,
-            indent=0, inw=' ' * 4, **scheme):
+                 indent=0, inw=' ' * 4, **scheme):
         self.id = id or 'dependencies'
         self.root = root
         self.type = type or 'digraph'
@@ -175,8 +175,8 @@ class DependencyGraph(object):
         components = self._tarjan72()

         NC = dict((node, component)
-                    for component in components
-                        for node in component)
+                  for component in components
+                  for node in component)
         for component in components:
             graph.add_arc(component)
         for node in self:
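
`DependencyGraph` is a small digraph with topological sorting built on the Tarjan pass above; a sketch, assuming `add_edge(A, B)` records that A depends on B:

    from celery.datastructures import DependencyGraph

    graph = DependencyGraph()
    for node in 'ABC':
        graph.add_arc(node)        # add_arc() adds a vertex
    graph.add_edge('A', 'B')       # assumption: A depends on B
    graph.add_edge('B', 'C')
    print(graph.topsort())         # dependencies ordered before dependents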

+ 11 - 10
celery/events/__init__.py

@@ -86,8 +86,8 @@ class EventDispatcher(object):
     DISABLED_TRANSPORTS = set(['sql'])

     def __init__(self, connection=None, hostname=None, enabled=True,
-            channel=None, buffer_while_offline=True, app=None,
-            serializer=None, groups=None):
+                 channel=None, buffer_while_offline=True, app=None,
+                 serializer=None, groups=None):
         self.app = app_or_default(app or self.app)
         self.connection = connection
         self.channel = channel
@@ -140,7 +140,7 @@ class EventDispatcher(object):
                 callback()

     def send(self, type, utcoffset=utcoffset, blind=False,
-            Event=Event, **fields):
+             Event=Event, **fields):
         """Send event.
         """Send event.
 
 
         :param type: Kind of event.
         :param type: Kind of event.
@@ -157,10 +157,11 @@ class EventDispatcher(object):
             clock = None if blind else self.clock.forward()

             with self.mutex:
-                event = Event(type, hostname=self.hostname,
-                                    clock=clock,
-                                    utcoffset=utcoffset(),
-                                    pid=self.pid, **fields)
+                event = Event(type,
+                              hostname=self.hostname,
+                              clock=clock,
+                              utcoffset=utcoffset(),
+                              pid=self.pid, **fields)
                 try:
                     self.publisher.publish(event,
                                            routing_key=type.replace('-', '.'),
@@ -200,7 +201,7 @@ class EventReceiver(ConsumerMixin):
     """
     """
 
 
     def __init__(self, connection, handlers=None, routing_key='#',
     def __init__(self, connection, handlers=None, routing_key='#',
-            node_id=None, app=None, queue_prefix='celeryev'):
+                 node_id=None, app=None, queue_prefix='celeryev'):
         self.app = app_or_default(app)
         self.connection = connection
         self.handlers = {} if handlers is None else handlers
@@ -228,7 +229,7 @@ class EventReceiver(ConsumerMixin):
                          callbacks=[self._receive], no_ack=True)]

     def on_consume_ready(self, connection, channel, consumers,
-            wakeup=True, **kwargs):
+                         wakeup=True, **kwargs):
         if wakeup:
             self.wakeup_workers(channel=channel)

@@ -290,7 +291,7 @@ class Events(object):

     @contextmanager
     def default_dispatcher(self, hostname=None, enabled=True,
-            buffer_while_offline=False):
+                           buffer_while_offline=False):
         with self.app.amqp.producer_pool.acquire(block=True) as pub:
             with self.Dispatcher(pub.connection, hostname, enabled,
                                  pub.channel, buffer_while_offline) as d:
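
`EventReceiver` (reflowed above) maps a handlers dict onto the event stream; the usual consumer loop, sketched with a catch-all handler (the broker URL is a placeholder):

    from celery import Celery

    app = Celery('proj', broker='amqp://')

    def on_event(event):
        print('{0[type]} from {0[hostname]}'.format(event))

    with app.connection() as connection:
        recv = app.events.Receiver(connection, handlers={'*': on_event})
        recv.capture(limit=10, timeout=5)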

+ 48 - 34
celery/events/cursesmon.py

@@ -199,9 +199,10 @@ class CursesMonitor(object):  # pragma: no cover
         def callback(my, mx, xs):
             y = count(xs)
             if not reply:
-                self.win.addstr(next(y), 3,
-                        'No replies received in 1s deadline.',
-                        curses.A_BOLD + curses.color_pair(2))
+                self.win.addstr(
+                    next(y), 3, 'No replies received in 1s deadline.',
+                    curses.A_BOLD + curses.color_pair(2),
+                )
                 return

             for subreply in reply:
@@ -269,9 +270,10 @@ class CursesMonitor(object):  # pragma: no cover
                 self.win.addstr(curline, 3, keys, curses.A_BOLD)
                 wrapped = wrap(value, mx - 2)
                 if len(wrapped) == 1:
-                    self.win.addstr(curline, len(keys) + 3,
-                            abbr(wrapped[0],
-                                 self.screen_width - (len(keys) + 3)))
+                    self.win.addstr(
+                        curline, len(keys) + 3,
+                        abbr(wrapped[0],
+                             self.screen_width - (len(keys) + 3)))
                 else:
                     for subline in wrapped:
                         nexty = next(y)
@@ -279,12 +281,15 @@ class CursesMonitor(object):  # pragma: no cover
                             subline = ' ' * 4 + '[...]'
                         elif nexty >= my:
                             break
-                        self.win.addstr(nexty, 3,
-                                abbr(' ' * 4 + subline, self.screen_width - 4),
-                                curses.A_NORMAL)
+                        self.win.addstr(
+                            nexty, 3,
+                            abbr(' ' * 4 + subline, self.screen_width - 4),
+                            curses.A_NORMAL,
+                        )

-        return self.alert(alert_callback,
-                'Task details for {0.selected_task}'.format(self))
+        return self.alert(
+            alert_callback, 'Task details for {0.selected_task}'.format(self),
+        )

     def selection_traceback(self):
         if not self.selected_task:
@@ -298,8 +303,10 @@ class CursesMonitor(object):  # pragma: no cover
             for line in task.traceback.split('\n'):
                 self.win.addstr(next(y), 3, line)

-        return self.alert(alert_callback,
-                'Task Exception Traceback for {0.selected_task}'.format(self))
+        return self.alert(
+            alert_callback,
+            'Task Exception Traceback for {0.selected_task}'.format(self),
+        )

     def selection_result(self):
         if not self.selected_task:
@@ -308,13 +315,15 @@ class CursesMonitor(object):  # pragma: no cover
         def alert_callback(my, mx, xs):
             y = count(xs)
             task = self.state.tasks[self.selected_task]
-            result = getattr(task, 'result', None) or getattr(task,
-                    'exception', None)
+            result = (getattr(task, 'result', None)
+                      or getattr(task, 'exception', None))
             for line in wrap(result, mx - 2):
                 self.win.addstr(next(y), 3, line)

-        return self.alert(alert_callback,
-                'Task Result for {0.selected_task}'.format(self))
+        return self.alert(
+            alert_callback,
+            'Task Result for {0.selected_task}'.format(self),
+        )

     def display_task_row(self, lineno, task):
         state_color = self.state_colors.get(task.state)
@@ -322,7 +331,8 @@ class CursesMonitor(object):  # pragma: no cover
         if task.uuid == self.selected_task:
             attr = curses.A_STANDOUT
         timestamp = datetime.utcfromtimestamp(
-                        task.timestamp or time.time())
+            task.timestamp or time.time(),
+        )
         timef = timestamp.strftime('%H:%M:%S')
         hostname = task.worker.hostname if task.worker else '*NONE*'
         line = self.format_row(task.uuid, task.name,
@@ -347,8 +357,8 @@ class CursesMonitor(object):  # pragma: no cover
         win.addstr(1, x, self.greet, curses.A_DIM | curses.color_pair(5))
         next(blank_line)
         win.addstr(next(y), x, self.format_row('UUID', 'TASK',
-                                           'WORKER', 'TIME', 'STATE'),
-                curses.A_BOLD | curses.A_UNDERLINE)
+                                               'WORKER', 'TIME', 'STATE'),
+                   curses.A_BOLD | curses.A_UNDERLINE)
         tasks = self.tasks
         if tasks:
             for row, (uuid, task) in enumerate(tasks):
@@ -378,8 +388,9 @@ class CursesMonitor(object):  # pragma: no cover
                     info['runtime'] = '{0:.2fs}'.format(info['runtime'])
                 if 'result' in info:
                     info['result'] = abbr(info['result'], 16)
-                info = ' '.join('{0}={1}'.format(key, value)
-                            for key, value in items(info))
+                info = ' '.join(
+                    '{0}={1}'.format(key, value) for key, value in items(info)
+                )
                 detail = '... -> key i'
             infowin = abbr(info,
                            self.screen_width - len(self.selected_str) - 2,
@@ -389,7 +400,7 @@ class CursesMonitor(object):  # pragma: no cover
             if detail in infowin:
                 detailpos = len(infowin) - len(detail)
                 win.addstr(my - 5, x + len(self.selected_str) + detailpos,
-                        detail, curses.A_BOLD)
+                           detail, curses.A_BOLD)
         else:
             win.addstr(my - 5, x, 'No task selected', curses.A_NORMAL)

@@ -397,18 +408,22 @@ class CursesMonitor(object):  # pragma: no cover
         if self.workers:
             win.addstr(my - 4, x, self.online_str, curses.A_BOLD)
             win.addstr(my - 4, x + len(self.online_str),
-                    ', '.join(sorted(self.workers)), curses.A_NORMAL)
+                       ', '.join(sorted(self.workers)), curses.A_NORMAL)
         else:
             win.addstr(my - 4, x, 'No workers discovered.')

         # Info
         win.addstr(my - 3, x, self.info_str, curses.A_BOLD)
-        win.addstr(my - 3, x + len(self.info_str),
-                STATUS_SCREEN.format(s=self.state,
-                    w_alive=len([w for w in values(self.state.workers)
-                                    if w.alive]),
-                    w_all=len(self.state.workers)),
-                curses.A_DIM)
+        win.addstr(
+            my - 3, x + len(self.info_str),
+            STATUS_SCREEN.format(
+                s=self.state,
+                w_alive=len([w for w in values(self.state.workers)
+                             if w.alive]),
+                w_all=len(self.state.workers),
+            ),
+            curses.A_DIM,
+        )

         # Help
         self.safe_add_str(my - 2, x, self.help_title, curses.A_BOLD)
@@ -461,9 +476,8 @@ class CursesMonitor(object):  # pragma: no cover

     @property
     def workers(self):
-        return [hostname
-                    for hostname, w in items(self.state.workers)
-                        if w.alive]
+        return [hostname for hostname, w in items(self.state.workers)
+                if w.alive]


 class DisplayThread(threading.Thread):  # pragma: no cover
@@ -483,7 +497,7 @@ def capture_events(app, state, display):  # pragma: no cover

     def on_connection_error(exc, interval):
         print('Connection Error: {0!r}. Retry in {1}s.'.format(
-                exc, interval), file=sys.stderr)
+            exc, interval), file=sys.stderr)

     while 1:
         print('-> evtop: starting capture...', file=sys.stderr)

+ 21 - 18
celery/events/dumper.py

@@ -39,34 +39,37 @@ class Dumper(object):
     def say(self, msg):
         print(msg, file=self.out)

-    def on_event(self, event):
-        timestamp = datetime.utcfromtimestamp(event.pop('timestamp'))
-        type = event.pop('type').lower()
-        hostname = event.pop('hostname')
+    def on_event(self, ev):
+        timestamp = datetime.utcfromtimestamp(ev.pop('timestamp'))
+        type = ev.pop('type').lower()
+        hostname = ev.pop('hostname')
         if type.startswith('task-'):
-            uuid = event.pop('uuid')
+            uuid = ev.pop('uuid')
             if type in ('task-received', 'task-sent'):
                 task = TASK_NAMES[uuid] = '{0}({1}) args={2} kwargs={3}' \
-                    .format(
-                        event.pop('name'), uuid,
-                        event.pop('args'),
-                        event.pop('kwargs'))
+                    .format(ev.pop('name'), uuid,
+                            ev.pop('args'),
+                            ev.pop('kwargs'))
             else:
                 task = TASK_NAMES.get(uuid, '')
             return self.format_task_event(hostname, timestamp,
-                                          type, task, event)
-        fields = ', '.join('{0}={1}'.format(key, event[key])
-                        for key in sorted(event))
+                                          type, task, ev)
+        fields = ', '.join(
+            '{0}={1}'.format(key, ev[key]) for key in sorted(ev)
+        )
         sep = fields and ':' or ''
-        self.say('{0} [{1}] {2}{3} {4}'.format(hostname, timestamp,
-                                            humanize_type(type), sep, fields))
+        self.say('{0} [{1}] {2}{3} {4}'.format(
+            hostname, timestamp, humanize_type(type), sep, fields),
+        )

     def format_task_event(self, hostname, timestamp, type, task, event):
-        fields = ', '.join('{0}={1}'.format(key, event[key])
-                        for key in sorted(event))
+        fields = ', '.join(
+            '{0}={1}'.format(key, event[key]) for key in sorted(event)
+        )
         sep = fields and ':' or ''
-        self.say('{0} [{1}] {2}{3} {4} {5}'.format(hostname, timestamp,
-                    humanize_type(type), sep, task, fields))
+        self.say('{0} [{1}] {2}{3} {4} {5}'.format(
+            hostname, timestamp, humanize_type(type), sep, task, fields),
+        )


 def evdump(app=None, out=sys.stdout):

+ 3 - 3
celery/events/snapshot.py

@@ -35,7 +35,7 @@ class Polaroid(object):
     _ctref = None

     def __init__(self, state, freq=1.0, maxrate=None,
-            cleanup_freq=3600.0, timer=None, app=None):
+                 cleanup_freq=3600.0, timer=None, app=None):
         self.app = app_or_default(app)
         self.state = state
         self.freq = freq
@@ -86,7 +86,7 @@ class Polaroid(object):


 def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
-        logfile=None, pidfile=None, timer=None, app=None):
+          logfile=None, pidfile=None, timer=None, app=None):
     app = app_or_default(app)

     if pidfile:
@@ -95,7 +95,7 @@ def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
     app.log.setup_logging_subsystem(loglevel, logfile)

     print('-> evcam: Taking snapshots with {0} (every {1} secs.)'.format(
-                camera, freq))
+        camera, freq))
     state = app.events.State()
     cam = instantiate(camera, state, app=app, freq=freq,
                       maxrate=maxrate, timer=timer)

+ 17 - 12
celery/events/state.py

@@ -51,7 +51,7 @@ warn = logger.warn


 def heartbeat_expires(timestamp, freq=60,
-        expire_window=HEARTBEAT_EXPIRE_WINDOW):
+                      expire_window=HEARTBEAT_EXPIRE_WINDOW):
     return timestamp + freq * (expire_window / 1e2)


@@ -275,7 +275,7 @@ class State(object):
     task_count = 0

     def __init__(self, callback=None,
-            max_workers_in_memory=5000, max_tasks_in_memory=10000):
+                 max_workers_in_memory=5000, max_tasks_in_memory=10000):
         self.max_workers_in_memory = max_workers_in_memory
         self.max_tasks_in_memory = max_tasks_in_memory
         self.workers = LRUCache(limit=self.max_workers_in_memory)
@@ -303,8 +303,9 @@ class State(object):

     def _clear_tasks(self, ready=True):
         if ready:
-            in_progress = dict((uuid, task) for uuid, task in self.itertasks()
-                                if task.state not in states.READY_STATES)
+            in_progress = dict(
+                (uuid, task) for uuid, task in self.itertasks()
+                if task.state not in states.READY_STATES)
             self.tasks.clear()
             self.tasks.update(in_progress)
         else:
@@ -332,7 +333,7 @@ class State(object):
             return worker, False
         except KeyError:
             worker = self.workers[hostname] = Worker(
-                    hostname=hostname, **kwargs)
+                hostname=hostname, **kwargs)
             return worker, True

     def get_or_create_task(self, uuid):
@@ -416,17 +417,21 @@ class State(object):
         Returns a list of ``(uuid, Task)`` tuples.

         """
-        return islice(((uuid, task)
-                            for uuid, task in self.tasks_by_time()
-                                if task.name == name), 0, limit)
+        return islice(
+            ((uuid, task) for uuid, task in self.tasks_by_time()
+             if task.name == name),
+            0, limit,
+        )

     def tasks_by_worker(self, hostname, limit=None):
         """Get all tasks by worker.

         """
-        return islice(((uuid, task)
-                        for uuid, task in self.tasks_by_time()
-                            if task.worker.hostname == hostname), 0, limit)
+        return islice(
+            ((uuid, task) for uuid, task in self.tasks_by_time()
+             if task.worker.hostname == hostname),
+            0, limit,
+        )

     def task_types(self):
         """Returns a list of all seen task types."""
@@ -438,7 +443,7 @@ class State(object):

     def __repr__(self):
         return '<State: events={0.event_count} tasks={0.task_count}>' \
-                    .format(self)
+            .format(self)


 state = State()

+ 6 - 6
celery/five.py

@@ -125,7 +125,7 @@ def with_metaclass(Type, skip_attrs=set(['__dict__', '__weakref__'])):

     def _clone_with_metaclass(Class):
         attrs = dict((key, value) for key, value in items(vars(Class))
-                        if key not in skip_attrs)
+                     if key not in skip_attrs)
         return Type(Class.__name__, Class.__bases__, attrs)

     return _clone_with_metaclass
@@ -296,20 +296,20 @@ class MagicModule(ModuleType):
         return list(set(self.__all__) | DEFAULT_ATTRS)


-def create_module(name, attrs, cls_attrs=None, pkg=None, base=MagicModule,
-        prepare_attr=None):
+def create_module(name, attrs, cls_attrs=None, pkg=None,
+                  base=MagicModule, prepare_attr=None):
     fqdn = '.'.join([pkg.__name__, name]) if pkg else name
     cls_attrs = {} if cls_attrs is None else cls_attrs

     attrs = dict((attr_name, prepare_attr(attr) if prepare_attr else attr)
-                    for attr_name, attr in attrs.items())
+                 for attr_name, attr in attrs.items())
     module = sys.modules[fqdn] = type(name, (base, ), cls_attrs)(fqdn)
     module.__dict__.update(attrs)
     return module


 def recreate_module(name, compat_modules=(), by_module={}, direct={},
-        base=MagicModule, **attrs):
+                    base=MagicModule, **attrs):
     old_module = sys.modules[name]
     origins = get_origins(by_module)
     compat_modules = COMPAT_MODULES.get(name, ())
@@ -321,7 +321,7 @@ def recreate_module(name, compat_modules=(), by_module={}, direct={},
                                 compat_modules, origins, direct, attrs])))))
     new_module = create_module(name, attrs, cls_attrs=cattrs, base=base)
     new_module.__dict__.update(dict((mod, get_compat_module(new_module, mod))
-                                     for mod in compat_modules))
+                               for mod in compat_modules))
     return old_module, new_module



+ 1 - 1
celery/loaders/__init__.py

@@ -24,7 +24,7 @@ def get_loader_cls(loader):


 @deprecated(deprecation='2.5', removal='4.0',
-        alternative='celery.current_app.loader')
+            alternative='celery.current_app.loader')
 def current_loader():
     return current_app.loader


+ 26 - 24
celery/loaders/base.py

@@ -114,15 +114,19 @@ class BaseLoader(object):
         return importlib.import_module(module, package=package)

     def import_from_cwd(self, module, imp=None, package=None):
-        return import_from_cwd(module,
-                self.import_module if imp is None else imp,
-                package=package)
+        return import_from_cwd(
+            module,
+            self.import_module if imp is None else imp,
+            package=package,
+        )

     def import_default_modules(self):
-        return [self.import_task_module(m)
-            for m in set(maybe_list(self.app.conf.CELERY_IMPORTS))
-                   | set(maybe_list(self.app.conf.CELERY_INCLUDE))
-                   | self.builtin_modules]
+        return [
+            self.import_task_module(m) for m in (
+                set(maybe_list(self.app.conf.CELERY_IMPORTS))
+                | set(maybe_list(self.app.conf.CELERY_INCLUDE))
+                | self.builtin_modules)
+        ]

     def init_worker(self):
         if not self.worker_initialized:
@@ -142,7 +146,7 @@ class BaseLoader(object):
             if silent:
                 return False
             raise ImproperlyConfigured(
-                    self.error_envvar_not_set.format(module_name))
+                self.error_envvar_not_set.format(module_name))
         return self.config_from_object(module_name, silent=silent)

     def config_from_object(self, obj, silent=False):
@@ -166,25 +170,23 @@ class BaseLoader(object):
             self.find_module(name)
         except NotAPackage:
             if name.endswith('.py'):
-                reraise(NotAPackage, NotAPackage(
-                        CONFIG_WITH_SUFFIX.format(
-                            module=name,
-                            suggest=name[:-3])), sys.exc_info()[2])
-            reraise(NotAPackage, NotAPackage(
-                    CONFIG_INVALID_NAME.format(
-                        module=name)), sys.exc_info()[2])
+                reraise(NotAPackage, NotAPackage(CONFIG_WITH_SUFFIX.format(
+                    module=name, suggest=name[:-3])), sys.exc_info()[2])
+            reraise(NotAPackage, NotAPackage(CONFIG_INVALID_NAME.format(
+                module=name)), sys.exc_info()[2])
         else:
             return self.import_from_cwd(name)

     def find_module(self, module):
         return find_module(module)

-    def cmdline_config_parser(self, args, namespace='celery',
-                re_type=re.compile(r'\((\w+)\)'),
-                extra_types={'json': anyjson.loads},
-                override_types={'tuple': 'json',
-                                'list': 'json',
-                                'dict': 'json'}):
+    def cmdline_config_parser(
+            self, args, namespace='celery',
+            re_type=re.compile(r'\((\w+)\)'),
+            extra_types={'json': anyjson.loads},
+            override_types={'tuple': 'json',
+                            'list': 'json',
+                            'dict': 'json'}):
         from celery.app.defaults import Option, NAMESPACES
         namespace = namespace.upper()
         typemap = dict(Option.typemap, **extra_types)
@@ -226,9 +228,9 @@ class BaseLoader(object):
         return dict(getarg(arg) for arg in args)

     def mail_admins(self, subject, body, fail_silently=False,
-            sender=None, to=None, host=None, port=None,
-            user=None, password=None, timeout=None,
-            use_ssl=False, use_tls=False):
+                    sender=None, to=None, host=None, port=None,
+                    user=None, password=None, timeout=None,
+                    use_ssl=False, use_tls=False):
         message = self.mail.Message(sender=sender, to=to,
                                     subject=safe_str(subject),
                                     body=safe_str(body))

+ 1 - 1
celery/loaders/default.py

@@ -33,7 +33,7 @@ class Loader(BaseLoader):
         """Read configuration from :file:`celeryconfig.py` and configure
         """Read configuration from :file:`celeryconfig.py` and configure
         celery and Django so it can be used by regular Python."""
         celery and Django so it can be used by regular Python."""
         configname = os.environ.get('CELERY_CONFIG_MODULE',
         configname = os.environ.get('CELERY_CONFIG_MODULE',
-                                     DEFAULT_CONFIG_MODULE)
+                                    DEFAULT_CONFIG_MODULE)
         try:
             usercfg = self._import_config_module(configname)
         except ImportError:

+ 1 - 1
celery/local.py

@@ -18,7 +18,7 @@ from .five import long_t, string, string_t


 def symbol_by_name(name, aliases={}, imp=None, package=None,
-        sep='.', default=None, **kwargs):
+                   sep='.', default=None, **kwargs):
     """Get symbol by qualified name.
     """Get symbol by qualified name.
 
 
     The name should be the full dot-separated path to the class::
     The name should be the full dot-separated path to the class::

+ 2 - 2
celery/platforms.py

@@ -268,7 +268,7 @@ class DaemonContext(object):
     _is_open = False

     def __init__(self, pidfile=None, workdir=None, umask=None,
-            fake=False, after_chdir=None, **kwargs):
+                 fake=False, after_chdir=None, **kwargs):
         self.workdir = workdir or DAEMON_WORKDIR
         self.umask = DAEMON_UMASK if umask is None else umask
         self.fake = fake
@@ -453,7 +453,7 @@ def initgroups(uid, gid):
     if hasattr(os, 'initgroups'):  # Python 2.7+
         return os.initgroups(username, gid)
     groups = [gr.gr_gid for gr in grp.getgrall()
-                            if username in gr.gr_mem]
+              if username in gr.gr_mem]
     setgroups(groups)



+ 8 - 7
celery/result.py

@@ -31,8 +31,9 @@ def from_serializable(r):
         id = parent = None
         res, nodes = r
         if nodes:
-            return GroupResult(res,
-                        [from_serializable(child) for child in nodes])
+            return GroupResult(
+                res, [from_serializable(child) for child in nodes],
+            )
         if isinstance(res, (list, tuple)):
             id, parent = res[0], res[1]
         return AsyncResult(id, parent=parent)
@@ -65,7 +66,7 @@ class AsyncResult(ResultBase):
     parent = None

     def __init__(self, id, backend=None, task_name=None,
-            app=None, parent=None):
+                 app=None, parent=None):
         self.app = app_or_default(app or self.app)
         self.id = id
         self.backend = backend or self.app.backend
@@ -123,8 +124,8 @@ class AsyncResult(ResultBase):
                 node.get(propagate=True, timeout=timeout, interval=interval)

         return self.backend.wait_for(self.id, timeout=timeout,
-                                              propagate=propagate,
-                                              interval=interval)
+                                     propagate=propagate,
+                                     interval=interval)
     wait = get  # deprecated alias to :meth:`get`.

     def _parents(self):
@@ -438,7 +439,7 @@ class ResultSet(ResultBase):
         """
         """
         elapsed = 0.0
         elapsed = 0.0
         results = OrderedDict((result.id, copy(result))
         results = OrderedDict((result.id, copy(result))
-                                for result in self.results)
+                              for result in self.results)

         while results:
             removed = set()
@@ -466,7 +467,7 @@ class ResultSet(ResultBase):

         """
         return (self.join_native if self.supports_native_join else self.join)(
-                    timeout=timeout, propagate=propagate, interval=interval)
+            timeout=timeout, propagate=propagate, interval=interval)

     def join(self, timeout=None, propagate=True, interval=0.5):
         """Gathers the results of all tasks as a list in order.

+ 37 - 40
celery/schedules.py

@@ -182,10 +182,11 @@ class crontab_parser(object):
         self.max_ = max_
         self.min_ = min_
         self.pats = (
-                (re.compile(self._range + self._steps), self._range_steps),
-                (re.compile(self._range), self._expand_range),
-                (re.compile(self._star + self._steps), self._star_steps),
-                (re.compile('^' + self._star + '$'), self._expand_star))
+            (re.compile(self._range + self._steps), self._range_steps),
+            (re.compile(self._range), self._expand_range),
+            (re.compile(self._star + self._steps), self._star_steps),
+            (re.compile('^' + self._star + '$'), self._expand_star),
+        )

     def parse(self, spec):
         acc = set()
@@ -207,10 +208,9 @@ class crontab_parser(object):
         if len(toks) > 1:
             to = self._expand_number(toks[1])
             if to < fr:  # Wrap around max_ if necessary
-                return (
-                    range(fr, self.min_ + self.max_) +
-                    range(self.min_, to + 1)
-                )
+                return range(fr,
+                             self.min_ + self.max_) + range(self.min_,
+                                                            to + 1)
             return range(fr, to + 1)
         return [fr]

@@ -382,9 +382,9 @@ class crontab(schedule):
         def roll_over():
             while 1:
                 flag = (datedata.dom == len(days_of_month) or
-                            day_out_of_range(datedata.year,
-                                             months_of_year[datedata.moy],
-                                             days_of_month[datedata.dom]))
+                        day_out_of_range(datedata.year,
+                                         months_of_year[datedata.moy],
+                                         days_of_month[datedata.dom]))
                 if flag:
                     datedata.dom = 0
                     datedata.moy += 1
@@ -404,11 +404,12 @@ class crontab(schedule):
                 datedata.moy = 0
         roll_over()

-        while not datetime(
-                year=datedata.year,
-                month=months_of_year[datedata.moy],
-                day=days_of_month[datedata.dom]) \
-                    .isoweekday() % 7 in self.day_of_week:
+        while 1:
+            th = datetime(year=datedata.year,
+                          month=months_of_year[datedata.moy],
+                          day=days_of_month[datedata.dom])
+            if th.isoweekday() % 7 in self.day_of_week:
+                break
             datedata.dom += 1
             roll_over()

@@ -421,7 +422,7 @@ class crontab(schedule):
                     microsecond=0)

     def __init__(self, minute='*', hour='*', day_of_week='*',
-            day_of_month='*', month_of_year='*', nowfun=None):
+                 day_of_month='*', month_of_year='*', nowfun=None):
         self._orig_minute = minute
         self._orig_hour = hour
         self._orig_day_of_week = day_of_week
@@ -438,12 +439,13 @@ class crontab(schedule):
         return (self.nowfun or self.app.now)()

     def __repr__(self):
-        return ('<crontab: %s %s %s %s %s (m/h/d/dM/MY)>' %
-                        (_weak_bool(self._orig_minute) or '*',
-                         _weak_bool(self._orig_hour) or '*',
-                         _weak_bool(self._orig_day_of_week) or '*',
-                         _weak_bool(self._orig_day_of_month) or '*',
-                         _weak_bool(self._orig_month_of_year) or '*'))
+        return ('<crontab: %s %s %s %s %s (m/h/d/dM/MY)>' % (
+            _weak_bool(self._orig_minute) or '*',
+            _weak_bool(self._orig_hour) or '*',
+            _weak_bool(self._orig_day_of_week) or '*',
+            _weak_bool(self._orig_day_of_month) or '*',
+            _weak_bool(self._orig_month_of_year) or '*',
+        ))

     def __reduce__(self):
         return (self.__class__, (self._orig_minute,
@@ -457,39 +459,34 @@ class crontab(schedule):
         dow_num = last_run_at.isoweekday() % 7  # Sunday is day 0, not day 7

         execute_this_date = (last_run_at.month in self.month_of_year and
-                                last_run_at.day in self.day_of_month and
-                                    dow_num in self.day_of_week)
+                             last_run_at.day in self.day_of_month and
+                             dow_num in self.day_of_week)

         execute_this_hour = (execute_this_date and
-                                last_run_at.hour in self.hour and
-                                    last_run_at.minute < max(self.minute))
+                             last_run_at.hour in self.hour and
+                             last_run_at.minute < max(self.minute))

         if execute_this_hour:
             next_minute = min(minute for minute in self.minute
-                                        if minute > last_run_at.minute)
-            delta = ffwd(minute=next_minute,
-                         second=0,
-                         microsecond=0)
+                              if minute > last_run_at.minute)
+            delta = ffwd(minute=next_minute, second=0, microsecond=0)
         else:
             next_minute = min(self.minute)
             execute_today = (execute_this_date and
-                                last_run_at.hour < max(self.hour))
+                             last_run_at.hour < max(self.hour))

             if execute_today:
                 next_hour = min(hour for hour in self.hour
-                                        if hour > last_run_at.hour)
-                delta = ffwd(hour=next_hour,
-                             minute=next_minute,
-                             second=0,
-                             microsecond=0)
+                                if hour > last_run_at.hour)
+                delta = ffwd(hour=next_hour, minute=next_minute,
+                             second=0, microsecond=0)
             else:
                 next_hour = min(self.hour)
                 all_dom_moy = (self._orig_day_of_month == '*' and
-                                  self._orig_month_of_year == '*')
+                               self._orig_month_of_year == '*')
                 if all_dom_moy:
                     next_day = min([day for day in self.day_of_week
-                                        if day > dow_num] or
-                                self.day_of_week)
+                                    if day > dow_num] or self.day_of_week)
                     add_week = next_day == dow_num

                     delta = ffwd(weeks=add_week and 1 or 0,

+ 1 - 1
celery/security/__init__.py

@@ -39,7 +39,7 @@ def disable_untrusted_serializers(whitelist=None):


 def setup_security(allowed_serializers=None, key=None, cert=None, store=None,
-        digest='sha1', serializer='json'):
+                   digest='sha1', serializer='json'):
     """Setup the message-signing serializer.
     """Setup the message-signing serializer.
 
 
     Disables untrusted serializers and if configured to use the ``auth``
     Disables untrusted serializers and if configured to use the ``auth``

+ 16 - 13
celery/security/serialization.py

@@ -29,7 +29,7 @@ def b64decode(s):
 class SecureSerializer(object):

     def __init__(self, key=None, cert=None, cert_store=None,
-            digest='sha1', serializer='json'):
+                 digest='sha1', serializer='json'):
         self._key = key
         self._cert = cert
         self._cert_store = cert_store
@@ -42,7 +42,7 @@ class SecureSerializer(object):
         assert self._cert is not None
         with reraise_errors('Unable to serialize: {0!r}', (Exception, )):
             content_type, content_encoding, body = encode(
-                    data, serializer=self._serializer)
+                data, serializer=self._serializer)
             # What we sign is the serialized body, not the body itself.
             # this way the receiver doesn't have to decode the contents
             # to verify the signature (and thus avoiding potential flaws
@@ -62,26 +62,29 @@ class SecureSerializer(object):
                                        payload['body'])
             self._cert_store[signer].verify(body, signature, self._digest)
         return decode(bytes_to_str(body), payload['content_type'],
-                            payload['content_encoding'], force=True)
+                      payload['content_encoding'], force=True)

     def _pack(self, body, content_type, content_encoding, signer, signature,
-            sep=str_to_bytes('\x00\x01')):
-        fields = sep.join(ensure_bytes(s)
-                for s in [signer, signature, content_type,
-                          content_encoding, body])
+              sep=str_to_bytes('\x00\x01')):
+        fields = sep.join(
+            ensure_bytes(s) for s in [signer, signature, content_type,
+                                      content_encoding, body]
+        )
         return b64encode(fields)

     def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
         values = b64decode(ensure_bytes(payload)).split(sep)
-        return {'signer': bytes_to_str(values[0]),
-                'signature': ensure_bytes(values[1]),
-                'content_type': bytes_to_str(values[2]),
-                'content_encoding': bytes_to_str(values[3]),
-                'body': ensure_bytes(values[4])}
+        return {
+            'signer': bytes_to_str(values[0]),
+            'signature': ensure_bytes(values[1]),
+            'content_type': bytes_to_str(values[2]),
+            'content_encoding': bytes_to_str(values[3]),
+            'body': ensure_bytes(values[4]),
+        }


 def register_auth(key=None, cert=None, store=None, digest='sha1',
-        serializer='json'):
+                  serializer='json'):
     """register security serializer"""
     """register security serializer"""
     s = SecureSerializer(key and PrivateKey(key),
     s = SecureSerializer(key and PrivateKey(key),
                          cert and Certificate(cert),
                          cert and Certificate(cert),

+ 2 - 1
celery/task/__init__.py

@@ -38,7 +38,8 @@ class module(MagicModule):
         return self.task(*args, **kwargs)


-old_module, new_module = recreate_module(__name__,  # pragma: no cover
+old_module, new_module = recreate_module(  # pragma: no cover
+    __name__,
     by_module={
         'celery.task.base': ['BaseTask', 'Task', 'PeriodicTask',
                              'task', 'periodic_task'],

+ 13 - 11
celery/task/base.py

@@ -92,7 +92,7 @@ class Task(BaseTask):
         return self._get_app().connection()

     def get_publisher(self, connection=None, exchange=None,
-            exchange_type=None, **options):
+                      exchange_type=None, **options):
         """Deprecated method to get the task publisher (now called producer).
         """Deprecated method to get the task publisher (now called producer).
 
 
         Should be replaced with :class:`@amqp.TaskProducer`:
         Should be replaced with :class:`@amqp.TaskProducer`:
@@ -108,9 +108,11 @@ class Task(BaseTask):
         if exchange_type is None:
             exchange_type = self.exchange_type
         connection = connection or self.establish_connection()
-        return self._get_app().amqp.TaskProducer(connection,
-                exchange=exchange and Exchange(exchange, exchange_type),
-                routing_key=self.routing_key, **options)
+        return self._get_app().amqp.TaskProducer(
+            connection,
+            exchange=exchange and Exchange(exchange, exchange_type),
+            routing_key=self.routing_key, **options
+        )

     @classmethod
     def get_consumer(self, connection=None, queues=None, **kwargs):
@@ -139,19 +141,19 @@ class PeriodicTask(Task):
     def __init__(self):
         if not hasattr(self, 'run_every'):
             raise NotImplementedError(
-                    'Periodic tasks must have a run_every attribute')
+                'Periodic tasks must have a run_every attribute')
         self.run_every = maybe_schedule(self.run_every, self.relative)
         super(PeriodicTask, self).__init__()

     @classmethod
     def on_bound(cls, app):
         app.conf.CELERYBEAT_SCHEDULE[cls.name] = {
-                'task': cls.name,
-                'schedule': cls.run_every,
-                'args': (),
-                'kwargs': {},
-                'options': cls.options or {},
-                'relative': cls.relative,
+            'task': cls.name,
+            'schedule': cls.run_every,
+            'args': (),
+            'kwargs': {},
+            'options': cls.options or {},
+            'relative': cls.relative,
         }



+ 2 - 2
celery/task/http.py

@@ -49,7 +49,7 @@ else:
         """With a dict's items() tuple return a new dict with any utf-8
         """With a dict's items() tuple return a new dict with any utf-8
         keys/values encoded."""
         keys/values encoded."""
         return dict((key.encode('utf-8'), maybe_utf8(value))
         return dict((key.encode('utf-8'), maybe_utf8(value))
-                        for key, value in tup)
+                    for key, value in tup)


 class InvalidResponseError(Exception):
@@ -72,7 +72,7 @@ def extract_response(raw_response, loads=anyjson.loads):
         payload = loads(raw_response)
     except ValueError as exc:
         reraise(InvalidResponseError, InvalidResponseError(
-                str(exc)), sys.exc_info()[2])
+            str(exc)), sys.exc_info()[2])

     status = payload['status']
     if status == 'success':

+ 1 - 1
celery/task/sets.py

@@ -57,7 +57,7 @@ class TaskSet(list):

     def _async_results(self, taskset_id, publisher):
         return [task.apply_async(taskset_id=taskset_id, publisher=publisher)
-                    for task in self]
+                for task in self]

     def apply(self, taskset_id=None):
         """Applies the TaskSet locally by blocking until all tasks return."""

+ 2 - 2
celery/task/trace.py

@@ -112,7 +112,7 @@ class TraceInfo(object):


 def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
-        Info=TraceInfo, eager=False, propagate=False):
+                 Info=TraceInfo, eager=False, propagate=False):
     """Builts a function that tracing the tasks execution; catches all
     """Builts a function that tracing the tasks execution; catches all
     exceptions, and saves the state and result of the task execution
     exceptions, and saves the state and result of the task execution
     to the result backend.
     to the result backend.
@@ -281,7 +281,7 @@ def _fast_trace_task(task, uuid, args, kwargs, request={}):
 def eager_trace_task(task, uuid, args, kwargs, request=None, **opts):
     opts.setdefault('eager', True)
     return build_tracer(task.name, task, **opts)(
-            uuid, args, kwargs, request)
+        uuid, args, kwargs, request)


 def report_internal_error(task, exc):

+ 2 - 2
celery/tests/__init__.py

@@ -45,7 +45,7 @@ def teardown():
     # Make sure there are no remaining threads at shutdown.
     import threading
     remaining_threads = [thread for thread in threading.enumerate()
-                            if thread.getName() != 'MainThread']
+                         if thread.getName() != 'MainThread']
     if remaining_threads:
         sys.stderr.write(
             '\n\n**WARNING**: Remaining threads at teardown: %r...\n' % (
@@ -69,7 +69,7 @@ def find_distribution_modules(name=__name__, file=__file__):


 def import_all_modules(name=__name__, file=__file__,
-        skip=['celery.decorators', 'celery.contrib.batches']):
+                       skip=['celery.decorators', 'celery.contrib.batches']):
     for module in find_distribution_modules(name, file):
         if module not in skip:
             try:

+ 2 - 2
celery/tests/app/test_amqp.py

@@ -106,8 +106,8 @@ class test_PublisherPool(AppCase):
 class test_Queues(AppCase):

     def test_queues_format(self):
-        prev, self.app.amqp.queues._consume_from = \
-                self.app.amqp.queues._consume_from, {}
+        prev, self.app.amqp.queues._consume_from = (
+            self.app.amqp.queues._consume_from, {})
         try:
             self.assertEqual(self.app.amqp.queues.format(), '')
         finally:

+ 18 - 15
celery/tests/app/test_app.py

@@ -34,8 +34,8 @@ class Object(object):

 def _get_test_config():
     return dict((key, getattr(config, key))
-                    for key in dir(config)
-                        if key.isupper() and not key.startswith('_'))
+                for key in dir(config)
+                if key.isupper() and not key.startswith('_'))

 test_config = _get_test_config()

@@ -195,7 +195,7 @@ class test_App(Case):

         app = Celery(set_as_current=False)
         app.conf.CELERY_ANNOTATIONS = {
-                adX.name: {'@__call__': deco}
+            adX.name: {'@__call__': deco}
         }
         adX.bind(app)
         self.assertIs(adX.app, app)
@@ -275,8 +275,7 @@ class test_App(Case):
             def execute_from_commandline(self, argv):
                 return argv

-        prev, worker_bin.worker = \
-                worker_bin.worker, worker
+        prev, worker_bin.worker = worker_bin.worker, worker
         try:
             ret = self.app.worker_main(argv=['--version'])
             self.assertListEqual(ret, ['--version'])
@@ -378,16 +377,19 @@ class test_App(Case):
         self.assertTrue(self.app.mail_admins('Subject', 'Body'))

     def test_amqp_get_broker_info(self):
-        self.assertDictContainsSubset({'hostname': 'localhost',
-                                       'userid': 'guest',
-                                       'password': 'guest',
-                                       'virtual_host': '/'},
-                        self.app.connection('pyamqp://').info())
+        self.assertDictContainsSubset(
+            {'hostname': 'localhost',
+             'userid': 'guest',
+             'password': 'guest',
+             'virtual_host': '/'},
+            self.app.connection('pyamqp://').info(),
+        )
         self.app.conf.BROKER_PORT = 1978
         self.app.conf.BROKER_VHOST = 'foo'
-        self.assertDictContainsSubset({'port': 1978,
-                                       'virtual_host': 'foo'},
-        self.app.connection('pyamqp://:1978/foo').info())
+        self.assertDictContainsSubset(
+            {'port': 1978, 'virtual_host': 'foo'},
+            self.app.connection('pyamqp://:1978/foo').info(),
+        )
         conn = self.app.connection('pyamqp:////value')
         self.assertDictContainsSubset({'virtual_host': '/value'},
                                       conn.info())
@@ -440,8 +442,9 @@ class test_App(Case):
             chan.close()
         assert conn.transport_cls == 'memory'

-        prod = self.app.amqp.TaskProducer(conn,
-                exchange=Exchange('foo_exchange'))
+        prod = self.app.amqp.TaskProducer(
+            conn, exchange=Exchange('foo_exchange'),
+        )

         dispatcher = Dispatcher()
         self.assertTrue(prod.publish_task('footask', (), {},

+ 1 - 1
celery/tests/app/test_beat.py

@@ -266,7 +266,7 @@ class test_Scheduler(Case):
         nums = [600, 300, 650, 120, 250, 36]
         s = dict(('test_ticks%s' % i,
                  {'schedule': mocked_schedule(False, j)})
-                    for i, j in enumerate(nums))
+                 for i, j in enumerate(nums))
         scheduler.update_from_dict(s)
         self.assertEqual(scheduler.tick(), min(nums))


+ 3 - 2
celery/tests/app/test_builtins.py

@@ -66,8 +66,9 @@ class test_chunks(Case):
         def chunks_mul(l):
             return l

-        app.tasks['celery.chunks'](chunks_mul,
-                [(2, 2), (4, 4), (8, 8)], 1)
+        app.tasks['celery.chunks'](
+            chunks_mul, [(2, 2), (4, 4), (8, 8)], 1,
+        )
         self.assertTrue(apply_chunks.called)



+ 1 - 1
celery/tests/app/test_celery.py

@@ -14,5 +14,5 @@ class test_celery_package(Case):

     def test_meta(self):
         for m in ('__author__', '__contact__', '__homepage__',
-                '__docformat__'):
+                  '__docformat__'):
             self.assertTrue(getattr(celery, m, None))

+ 4 - 3
celery/tests/app/test_control.py

@@ -131,8 +131,9 @@ class test_Broadcast(Case):

     @with_mock_broadcast
     def test_broadcast_limit(self):
-        self.control.broadcast('foobarbaz1', arguments=[], limit=None,
-                destination=[1, 2, 3])
+        self.control.broadcast(
+            'foobarbaz1', arguments=[], limit=None, destination=[1, 2, 3],
+        )
         self.assertIn('foobarbaz1', MockMailbox.sent)

     @with_mock_broadcast
@@ -190,6 +191,6 @@ class test_Broadcast(Case):
     def test_revoke_from_resultset(self):
         r = self.app.GroupResult(uuid(),
                                  [self.app.AsyncResult(x)
-                                     for x in [uuid() for i in range(10)]])
+                                  for x in [uuid() for i in range(10)]])
         r.revoke()
         self.assertIn('revoke', MockMailbox.sent)

+ 16 - 13
celery/tests/app/test_loaders.py

@@ -9,9 +9,9 @@ from mock import Mock, patch
 from celery import loaders
 from celery.app import app_or_default
 from celery.exceptions import (
-        NotConfigured,
-        ImproperlyConfigured,
-        CPendingDeprecationWarning,
+    NotConfigured,
+    ImproperlyConfigured,
+    CPendingDeprecationWarning,
 )
 from celery.five import items
 from celery.loaders import base
@@ -49,15 +49,17 @@ class test_loaders(AppCase):
     def test_get_loader_cls(self):

         self.assertEqual(loaders.get_loader_cls('default'),
-                          default.Loader)
+                         default.Loader)

     def test_current_loader(self):
-        with self.assertWarnsRegex(CPendingDeprecationWarning,
+        with self.assertWarnsRegex(
+                CPendingDeprecationWarning,
                 r'deprecation'):
             self.assertIs(loaders.current_loader(), self.app.loader)

     def test_load_settings(self):
-        with self.assertWarnsRegex(CPendingDeprecationWarning,
+        with self.assertWarnsRegex(
+                CPendingDeprecationWarning,
                 r'deprecation'):
             self.assertIs(loaders.load_settings(), self.app.conf)

@@ -102,12 +104,13 @@ class test_LoaderBase(Case):
 
 
     def test_import_default_modules(self):
     def test_import_default_modules(self):
         modnames = lambda l: [m.__name__ for m in l]
         modnames = lambda l: [m.__name__ for m in l]
-        prev, self.app.conf.CELERY_IMPORTS = \
-                self.app.conf.CELERY_IMPORTS, ('os', 'sys')
+        prev, self.app.conf.CELERY_IMPORTS = (
+            self.app.conf.CELERY_IMPORTS, ('os', 'sys'))
         try:
         try:
-            self.assertEqual(sorted(modnames(
-                                self.loader.import_default_modules())),
-                            sorted(modnames([os, sys])))
+            self.assertEqual(
+                sorted(modnames(self.loader.import_default_modules())),
+                sorted(modnames([os, sys])),
+            )
         finally:
         finally:
             self.app.conf.CELERY_IMPORTS = prev
             self.app.conf.CELERY_IMPORTS = prev
 
 
@@ -263,8 +266,8 @@ class test_AppLoader(Case):
         self.assertEqual(self.loader.conf['BAR'], 20)
         self.assertEqual(self.loader.conf['BAR'], 20)
 
 
     def test_on_worker_init(self):
     def test_on_worker_init(self):
-        prev, self.app.conf.CELERY_IMPORTS = \
-                self.app.conf.CELERY_IMPORTS, ('subprocess', )
+        prev, self.app.conf.CELERY_IMPORTS = (
+            self.app.conf.CELERY_IMPORTS, ('subprocess', ))
         try:
         try:
             sys.modules.pop('subprocess', None)
             sys.modules.pop('subprocess', None)
             self.loader.init_worker()
             self.loader.init_worker()
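
The two CELERY_IMPORTS hunks above keep reshaping the same save/swap/restore idiom: stash the old config value and install the replacement in one tuple assignment, then put the original back in a finally block. A minimal standalone sketch of the idiom (the Conf class and values here are hypothetical, not code from this repository):

    # Hypothetical stand-in for app.conf; any attribute holder works.
    class Conf(object):
        CELERY_IMPORTS = ()

    conf = Conf()

    # Save and swap in a single tuple assignment...
    prev, conf.CELERY_IMPORTS = conf.CELERY_IMPORTS, ('os', 'sys')
    try:
        assert conf.CELERY_IMPORTS == ('os', 'sys')
    finally:
        # ...and restore in `finally` so the original value survives
        # even if the body raises.
        conf.CELERY_IMPORTS = prev

    assert conf.CELERY_IMPORTS == ()

The commit only swaps the backslash continuation for parentheses; the behaviour of the assignment is unchanged.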

+ 7 - 5
celery/tests/app/test_log.py

@@ -164,8 +164,10 @@ class test_default_logger(AppCase):
         Logging._setup = False
         logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
                                    root=False, colorize=None)
-        self.assertIs(get_handlers(logger)[0].stream, sys.__stderr__,
-                'setup_logger logs to stderr without logfile argument.')
+        self.assertIs(
+            get_handlers(logger)[0].stream, sys.__stderr__,
+            'setup_logger logs to stderr without logfile argument.',
+        )

     def test_setup_logger_no_handlers_stream(self):
         l = self.get_logger()
@@ -174,7 +176,7 @@ class test_default_logger(AppCase):
         with override_stdouts() as outs:
             stdout, stderr = outs
             l = self.setup_logger(logfile=sys.stderr, loglevel=logging.INFO,
-                                root=False)
+                                  root=False)
             l.info('The quick brown fox...')
             self.assertIn('The quick brown fox...', stderr.getvalue())

@@ -195,7 +197,7 @@ class test_default_logger(AppCase):
                 logger.error('foo')
                 self.assertIn('foo', sio.getvalue())
                 log.redirect_stdouts_to_logger(logger, stdout=False,
-                        stderr=False)
+                                               stderr=False)
         finally:
             sys.stdout, sys.stderr = sys.__stdout__, sys.__stderr__

@@ -221,7 +223,7 @@ class test_default_logger(AppCase):

     def test_logging_proxy_recurse_protection(self):
         logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
-                root=False)
+                                   root=False)
         p = LoggingProxy(logger, loglevel=logging.ERROR)
         p._thread.recurse_protection = True
         try:

+ 11 - 8
celery/tests/app/test_routes.py

@@ -78,13 +78,15 @@ class test_MapRoute(RouteCase):
     def test_route_for_task(self):
         expand = E(current_app.amqp.queues)
         route = routes.MapRoute({mytask.name: b_queue})
-        self.assertDictContainsSubset(b_queue,
-                             expand(route.route_for_task(mytask.name)))
+        self.assertDictContainsSubset(
+            b_queue,
+            expand(route.route_for_task(mytask.name)),
+        )
         self.assertIsNone(route.route_for_task('celery.awesome'))

     def test_expand_route_not_found(self):
         expand = E(current_app.amqp.Queues(
-                    current_app.conf.CELERY_QUEUES, False))
+            current_app.conf.CELERY_QUEUES, False))
         route = routes.MapRoute({'a': {'queue': 'x'}})
         with self.assertRaises(QueueNotFound):
             expand(route.route_for_task('a'))
@@ -134,9 +136,11 @@ class test_lookup_route(RouteCase):
                             {mytask.name: {'queue': 'foo'}}))
         router = Router(R, current_app.amqp.queues)
         self.assertEqual(router.route({}, mytask.name,
-                          args=[1, 2], kwargs={})['queue'].name, 'foo')
-        self.assertEqual(router.route({}, 'celery.poza')['queue'].name,
-                current_app.conf.CELERY_DEFAULT_QUEUE)
+                         args=[1, 2], kwargs={})['queue'].name, 'foo')
+        self.assertEqual(
+            router.route({}, 'celery.poza')['queue'].name,
+            current_app.conf.CELERY_DEFAULT_QUEUE,
+        )


 class test_prepare(Case):
@@ -145,8 +149,7 @@ class test_prepare(Case):
         from celery.datastructures import LRUCache
         o = object()
         R = [{'foo': 'bar'},
-                  'celery.datastructures.LRUCache',
-                  o]
+             'celery.datastructures.LRUCache', o]
         p = routes.prepare(R)
         self.assertIsInstance(p[0], routes.MapRoute)
         self.assertIsInstance(maybe_promise(p[1]), LRUCache)

+ 6 - 6
celery/tests/backends/test_amqp.py

@@ -169,7 +169,7 @@ class test_AMQPBackend(AppCase):
         r1 = backend.get_task_meta(uuid())
         self.assertDictContainsSubset({'status': states.FAILURE,
                                        'seq': 3}, r1,
-                                       'FFWDs to the last state')
+                                      'FFWDs to the last state')

         # Caches last known state.
         results.put(Message())
@@ -233,11 +233,11 @@ class test_AMQPBackend(AppCase):

         res = list(b.get_many(tids, timeout=1))
         expected_results = [(tid, {'status': states.SUCCESS,
-                                    'result': i,
-                                    'traceback': None,
-                                    'task_id': tid,
-                                    'children': None})
-                                for i, tid in enumerate(tids)]
+                                   'result': i,
+                                   'traceback': None,
+                                   'task_id': tid,
+                                   'children': None})
+                            for i, tid in enumerate(tids)]
         self.assertEqual(sorted(res), sorted(expected_results))
         self.assertDictEqual(b._cache[res[0][0]], res[0][1])
         cached_res = list(b.get_many(tids, timeout=1))

+ 2 - 2
celery/tests/backends/test_base.py

@@ -12,7 +12,7 @@ from celery.result import AsyncResult, GroupResult
 from celery.utils import serialization
 from celery.utils.serialization import subclass_exception
 from celery.utils.serialization import \
-        find_nearest_pickleable_exception as fnpe
+    find_nearest_pickleable_exception as fnpe
 from celery.utils.serialization import UnpickleableExceptionWrapper
 from celery.utils.serialization import get_pickleable_exception as gpe

@@ -47,7 +47,7 @@ class test_serialization(Case):
     def test_create_exception_cls(self):
         self.assertTrue(serialization.create_exception_cls('FooError', 'm'))
         self.assertTrue(serialization.create_exception_cls('FooError', 'm',
-                                                            KeyError))
+                                                           KeyError))


 class test_BaseBackend_interface(Case):

+ 2 - 2
celery/tests/backends/test_cache.py

@@ -122,8 +122,8 @@ class MemcachedClient(DummyClient):
     def set(self, key, value, *args, **kwargs):
         if isinstance(key, text_t):
             raise MyMemcachedStringEncodingError(
-                    'Keys must be bytes, not string.  Convert your '
-                    'strings using mystring.encode(charset)!')
+                'Keys must be bytes, not string.  Convert your '
+                'strings using mystring.encode(charset)!')
         return super(MemcachedClient, self).set(key, value, *args, **kwargs)



+ 9 - 8
celery/tests/backends/test_cassandra.py

@@ -78,8 +78,7 @@ class test_CassandraBackend(AppCase):
             # no servers raises ImproperlyConfigured
             with self.assertRaises(ImproperlyConfigured):
                 app.conf.CASSANDRA_SERVERS = None
-                mod.CassandraBackend(app=app, keyspace='b',
-                        column_family='c')
+                mod.CassandraBackend(app=app, keyspace='b', column_family='c')

     def test_reduce(self):
         with mock_module('pycassa'):
@@ -98,12 +97,14 @@ class test_CassandraBackend(AppCase):
             Get_Column = x._get_column_family = Mock()
             get_column = Get_Column.return_value = Mock()
             get = get_column.get
-            META = get.return_value = {'task_id': 'task_id',
-                                'status': states.SUCCESS,
-                                'result': '1',
-                                'date_done': 'date',
-                                'traceback': '',
-                                'children': None}
+            META = get.return_value = {
+                'task_id': 'task_id',
+                'status': states.SUCCESS,
+                'result': '1',
+                'date_done': 'date',
+                'traceback': '',
+                'children': None,
+            }
             x.decode = Mock()
             x.detailed_mode = False
             meta = x._get_task_meta_for('task_id')

+ 6 - 5
celery/tests/backends/test_database.py

@@ -62,11 +62,12 @@ class test_DatabaseBackend(Case):

     def test_missing_task_meta_is_dict_with_pending(self):
         tb = DatabaseBackend()
-        self.assertDictContainsSubset(
-            {'status': states.PENDING,
-             'task_id': 'xxx-does-not-exist-at-all',
-             'result': None,
-             'traceback': None}, tb.get_task_meta('xxx-does-not-exist-at-all'))
+        self.assertDictContainsSubset({
+            'status': states.PENDING,
+            'task_id': 'xxx-does-not-exist-at-all',
+            'result': None,
+            'traceback': None
+        }, tb.get_task_meta('xxx-does-not-exist-at-all'))

     def test_mark_as_done(self):
         tb = DatabaseBackend()

+ 4 - 2
celery/tests/backends/test_mongodb.py

@@ -250,8 +250,10 @@ class test_MongoBackend(AppCase):
         mock_database.__getitem__.assert_called_once_with(MONGODB_COLLECTION)
         mock_collection.find_one.assert_called_once_with(
             {'_id': sentinel.taskset_id})
-        self.assertEquals(['date_done', 'result', 'task_id'],
-                list(ret_val.keys()))
+        self.assertEquals(
+            ['date_done', 'result', 'task_id'],
+            list(ret_val.keys()),
+        )

     @patch('celery.backends.mongodb.MongoBackend._get_database')
     def test_delete_group(self, mock_get_database):

+ 4 - 2
celery/tests/backends/test_redis.py

@@ -138,8 +138,10 @@ class test_RedisBackend(Case):
         self.assertEqual(b.expires, 60)

     def test_on_chord_apply(self):
-        self.Backend().on_chord_apply('group_id', {},
-                result=[AsyncResult(x) for x in [1, 2, 3]])
+        self.Backend().on_chord_apply(
+            'group_id', {},
+            result=[AsyncResult(x) for x in [1, 2, 3]],
+        )

     def test_mget(self):
         b = self.MockBackend()

+ 8 - 4
celery/tests/bin/test_amqp.py

@@ -68,10 +68,14 @@ class test_AMQShell(AppCase):
         self.shell.onecmd('queue.delete foo')

     def test_completenames(self):
-        self.assertEqual(self.shell.completenames('queue.dec'),
-                ['queue.declare'])
-        self.assertEqual(sorted(self.shell.completenames('declare')),
-                sorted(['queue.declare', 'exchange.declare']))
+        self.assertEqual(
+            self.shell.completenames('queue.dec'),
+            ['queue.declare'],
+        )
+        self.assertEqual(
+            sorted(self.shell.completenames('declare')),
+            sorted(['queue.declare', 'exchange.declare']),
+        )

     def test_empty_line(self):
         self.shell.emptyline = Mock()

+ 3 - 2
celery/tests/bin/test_base.py

@@ -86,8 +86,9 @@ class test_Command(AppCase):
         try:
             cmd = MockCommand()
             cmd.setup_app_from_commandline(['--broker=xyzza://'])
-            self.assertEqual(os.environ.get('CELERY_BROKER_URL'),
-                    'xyzza://')
+            self.assertEqual(
+                os.environ.get('CELERY_BROKER_URL'), 'xyzza://',
+            )
         finally:
             if prev:
                 os.environ['CELERY_BROKER_URL'] = prev

+ 3 - 2
celery/tests/bin/test_beat.py

@@ -158,8 +158,9 @@ class test_div(AppCase):

     def setup(self):
         self.prev, beatapp.Beat = beatapp.Beat, MockBeat
-        self.ctx, beat_bin.detached = \
-                beat_bin.detached, MockDaemonContext
+        self.ctx, beat_bin.detached = (
+            beat_bin.detached, MockDaemonContext,
+        )

     def teardown(self):
         beatapp.Beat = self.prev

+ 8 - 4
celery/tests/bin/test_celery.py

@@ -70,10 +70,14 @@ class test_Command(AppCase):
         self.assertIn('bar', self.cmd.pretty(['foo', 'bar'])[1])

     def test_pretty_dict(self):
-        self.assertIn('OK',
-            str(self.cmd.pretty({'ok': 'the quick brown fox'})[0]))
-        self.assertIn('ERROR',
-            str(self.cmd.pretty({'error': 'the quick brown fox'})[0]))
+        self.assertIn(
+            'OK',
+            str(self.cmd.pretty({'ok': 'the quick brown fox'})[0]),
+        )
+        self.assertIn(
+            'ERROR',
+            str(self.cmd.pretty({'error': 'the quick brown fox'})[0]),
+        )

     def test_pretty(self):
         self.assertIn('OK', str(self.cmd.pretty('the quick brown')))

+ 5 - 4
celery/tests/bin/test_celeryd_detach.py

@@ -25,14 +25,14 @@ if not current_app.IS_WINDOWS:
             context.__exit__ = Mock()

             detach('/bin/boo', ['a', 'b', 'c'], logfile='/var/log',
-                    pidfile='/var/pid')
+                   pidfile='/var/pid')
             detached.assert_called_with('/var/log', '/var/pid', None, None, 0,
                                         None, False)
             execv.assert_called_with('/bin/boo', ['/bin/boo', 'a', 'b', 'c'])

             execv.side_effect = Exception('foo')
-            r = detach('/bin/boo', ['a', 'b', 'c'], logfile='/var/log',
-                    pidfile='/var/pid')
+            r = detach('/bin/boo', ['a', 'b', 'c'],
+                       logfile='/var/log', pidfile='/var/pid')
             context.__enter__.assert_called_with()
             self.assertTrue(logger.critical.called)
             setup_logs.assert_called_with('ERROR', '/var/log')
@@ -84,7 +84,8 @@ class test_Command(Case):
         x = detached_celeryd()
         x.execute_from_commandline(self.argv)
         self.assertTrue(exit.called)
-        detach.assert_called_with(path=x.execv_path, uid=None, gid=None,
+        detach.assert_called_with(
+            path=x.execv_path, uid=None, gid=None,
             umask=0, fake=False, logfile='/var/log', pidfile='celeryd.pid',
             argv=['-m', 'celery', 'worker', '-c', '1', '-lDEBUG',
                   '--logfile=/var/log', '--pidfile=celeryd.pid',

+ 76 - 50
celery/tests/bin/test_multi.py

@@ -43,8 +43,10 @@ class test_functions(Case):
     def test_parse_ns_range(self):
         self.assertEqual(parse_ns_range('1-3', True), ['1', '2', '3'])
         self.assertEqual(parse_ns_range('1-3', False), ['1-3'])
-        self.assertEqual(parse_ns_range('1-3,10,11,20', True),
-                ['1', '2', '3', '10', '11', '20'])
+        self.assertEqual(parse_ns_range(
+            '1-3,10,11,20', True),
+            ['1', '2', '3', '10', '11', '20'],
+        )

     def test_format_opt(self):
         self.assertEqual(format_opt('--foo', None), '--foo')
@@ -78,14 +80,16 @@ class test_multi_args(Case):

     @patch('socket.gethostname')
     def test_parse(self, gethostname):
-        p = NamespacedOptionParser(['-c:jerry,elaine', '5',
-                                    '--loglevel:kramer=DEBUG',
-                                    '--flag',
-                                    '--logfile=foo', '-Q', 'bar', 'jerry',
-                                    'elaine', 'kramer',
-                                    '--', '.disable_rate_limits=1'])
+        p = NamespacedOptionParser([
+            '-c:jerry,elaine', '5',
+            '--loglevel:kramer=DEBUG',
+            '--flag',
+            '--logfile=foo', '-Q', 'bar', 'jerry',
+            'elaine', 'kramer',
+            '--', '.disable_rate_limits=1',
+        ])
         it = multi_args(p, cmd='COMMAND', append='*AP*',
-                prefix='*P*', suffix='*S*')
+                        prefix='*P*', suffix='*S*')
         names = list(it)

         def assert_line_in(name, args):
@@ -98,53 +102,62 @@ class test_multi_args(Case):
             for arg in args:
                 self.assertIn(arg, argv)

-        assert_line_in('*P*jerry@*S*',
-            [
-                'COMMAND', '-n *P*jerry@*S*', '-Q bar',
-                '-c 5', '--flag', '--logfile=foo',
-                '-- .disable_rate_limits=1', '*AP*',
-            ]
+        assert_line_in(
+            '*P*jerry@*S*',
+            ['COMMAND', '-n *P*jerry@*S*', '-Q bar',
+             '-c 5', '--flag', '--logfile=foo',
+             '-- .disable_rate_limits=1', '*AP*'],
         )
-        assert_line_in('*P*elaine@*S*',
-            [
-                'COMMAND', '-n *P*elaine@*S*', '-Q bar',
-                '-c 5', '--flag', '--logfile=foo',
-                '-- .disable_rate_limits=1', '*AP*',
-            ]
+        assert_line_in(
+            '*P*elaine@*S*',
+            ['COMMAND', '-n *P*elaine@*S*', '-Q bar',
+             '-c 5', '--flag', '--logfile=foo',
+             '-- .disable_rate_limits=1', '*AP*'],
         )
-        assert_line_in('*P*kramer@*S*',
-            [
-                'COMMAND', '--loglevel=DEBUG', '-n *P*kramer@*S*',
-                '-Q bar', '--flag', '--logfile=foo',
-                '-- .disable_rate_limits=1', '*AP*',
-            ]
+        assert_line_in(
+            '*P*kramer@*S*',
+            ['COMMAND', '--loglevel=DEBUG', '-n *P*kramer@*S*',
+             '-Q bar', '--flag', '--logfile=foo',
+             '-- .disable_rate_limits=1', '*AP*'],
         )
         expand = names[0][2]
         self.assertEqual(expand('%h'), '*P*jerry@*S*')
         self.assertEqual(expand('%n'), 'jerry')
         names2 = list(multi_args(p, cmd='COMMAND', append='',
-                prefix='*P*', suffix='*S*'))
+                      prefix='*P*', suffix='*S*'))
         self.assertEqual(names2[0][1][-1], '-- .disable_rate_limits=1')

         gethostname.return_value = 'example.com'
         p2 = NamespacedOptionParser(['10', '-c:1', '5'])
         names3 = list(multi_args(p2, cmd='COMMAND'))
         self.assertEqual(len(names3), 10)
-        self.assertEqual(names3[0][0:2], ('celery1@example.com',
-            ['COMMAND', '-n celery1@example.com', '-c 5', '']))
+        self.assertEqual(
+            names3[0][0:2],
+            ('celery1@example.com',
+             ['COMMAND', '-n celery1@example.com', '-c 5', '']),
+        )
         for i, worker in enumerate(names3[1:]):
-            self.assertEqual(worker[0:2], ('celery%s@example.com' % (i + 2),
-                ['COMMAND', '-n celery%s@example.com' % (i + 2), '']))
+            self.assertEqual(
+                worker[0:2],
+                ('celery%s@example.com' % (i + 2),
+                 ['COMMAND', '-n celery%s@example.com' % (i + 2), '']),
+            )

         names4 = list(multi_args(p2, cmd='COMMAND', suffix='""'))
         self.assertEqual(len(names4), 10)
-        self.assertEqual(names4[0][0:2], ('celery1@',
-            ['COMMAND', '-n celery1@', '-c 5', '']))
+        self.assertEqual(
+            names4[0][0:2],
+            ('celery1@',
+             ['COMMAND', '-n celery1@', '-c 5', '']),
+        )

         p3 = NamespacedOptionParser(['foo@', '-c:foo', '5'])
         names5 = list(multi_args(p3, cmd='COMMAND', suffix='""'))
-        self.assertEqual(names5[0][0:2], ('foo@',
-            ['COMMAND', '-n foo@', '-c 5', '']))
+        self.assertEqual(
+            names5[0][0:2],
+            ('foo@',
+             ['COMMAND', '-n foo@', '-c 5', '']),
+        )


 class test_MultiTool(Case):
@@ -198,7 +211,8 @@ class test_MultiTool(Case):
         pipe.wait.return_value = 2
         self.assertEqual(self.t.waitexec(['-m', 'foo'], 'path'), 2)
         self.t.note.assert_called_with(
-                '* Child terminated with errorcode 2')
+            '* Child terminated with errorcode 2',
+        )

         pipe.wait.return_value = 0
         self.assertFalse(self.t.waitexec(['-m', 'foo', 'path']))
@@ -223,8 +237,9 @@ class test_MultiTool(Case):

     def test_expand(self):
         self.t.expand(['foo%n', 'ask', 'klask', 'dask'])
-        self.assertEqual(self.fh.getvalue(),
-                'fooask\nfooklask\nfoodask\n')
+        self.assertEqual(
+            self.fh.getvalue(), 'fooask\nfooklask\nfoodask\n',
+        )

     def test_restart(self):
         stop = self.t._stop_nodes = Mock()
@@ -297,20 +312,25 @@ class test_MultiTool(Case):
         nodes = self.t.getpids(p, 'celery worker', callback=callback)
         node_0, node_1 = nodes
         self.assertEqual(node_0[0], 'foo@e.com')
-        self.assertEqual(sorted(node_0[1]),
+        self.assertEqual(
+            sorted(node_0[1]),
             sorted(('celery worker', '--pidfile=foo.pid',
-                    '-n foo@e.com', '')))
+                    '-n foo@e.com', '')),
+        )
         self.assertEqual(node_0[2], 10)

         self.assertEqual(node_1[0], 'bar@e.com')
-        self.assertEqual(sorted(node_1[1]),
+        self.assertEqual(
+            sorted(node_1[1]),
             sorted(('celery worker', '--pidfile=bar.pid',
-                    '-n bar@e.com', '')))
+                    '-n bar@e.com', '')),
+        )
         self.assertEqual(node_1[2], 11)
         self.assertTrue(callback.called)
         cargs, _ = callback.call_args
         self.assertEqual(cargs[0], 'baz@e.com')
-        self.assertItemsEqual(cargs[1],
+        self.assertItemsEqual(
+            cargs[1],
             ['celery worker', '--pidfile=baz.pid', '-n baz@e.com', ''],
         )
         self.assertIsNone(cargs[2])
@@ -334,10 +354,14 @@ class test_MultiTool(Case):
         self.t.stop(['foo', 'bar', 'baz'], 'celery worker', callback=callback)
         sigs = sorted(self.t.signal_node.call_args_list)
         self.assertEqual(len(sigs), 2)
-        self.assertIn(('foo@e.com', 10, signal.SIGTERM),
-                [tup[0] for tup in sigs])
-        self.assertIn(('bar@e.com', 11, signal.SIGTERM),
-                [tup[0] for tup in sigs])
+        self.assertIn(
+            ('foo@e.com', 10, signal.SIGTERM),
+            [tup[0] for tup in sigs],
+        )
+        self.assertIn(
+            ('bar@e.com', 11, signal.SIGTERM),
+            [tup[0] for tup in sigs],
+        )
         self.t.signal_node.return_value = False
         self.assertTrue(callback.called)
         self.t.stop(['foo', 'bar', 'baz'], 'celery worker', callback=None)
@@ -428,8 +452,10 @@ class test_MultiTool(Case):
         self.t.execute_from_commandline(['multi', '-foo'])
         self.t.error.assert_called_with()

-        self.t.execute_from_commandline(['multi', 'start', 'foo',
-                '--nosplash', '--quiet', '-q', '--verbose', '--no-color'])
+        self.t.execute_from_commandline(
+            ['multi', 'start', 'foo',
+             '--nosplash', '--quiet', '-q', '--verbose', '--no-color'],
+        )
         self.assertTrue(self.t.nosplash)
         self.assertTrue(self.t.quiet)
         self.assertTrue(self.t.verbose)
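
As in the rest of the commit, nearly every hunk in this file just normalises continuation lines to one of the two layouts the pep8 checker accepts: aligned with the opening delimiter, or a plain hanging indent. A minimal before/after sketch (the parse helper is hypothetical, used only to show the layouts):

    # Hypothetical helper; only the call layout matters here.
    def parse(spec, expand):
        return ['1', '2', '3'] if expand else [spec]

    # Flagged by pep8 (E128): the continuation line neither aligns
    # with the opening delimiter nor forms a clean hanging indent.
    bad = parse('1-3',
            True)

    # Accepted: align continuation lines with the opening delimiter...
    ok1 = parse('1-3',
                True)

    # ...or break right after the delimiter and indent uniformly.
    ok2 = parse(
        '1-3',
        True,
    )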

+ 13 - 11
celery/tests/bin/test_worker.py

@@ -202,8 +202,7 @@ class test_Worker(WorkerAppCase):

         # test when there are too few output lines
         # to draft the ascii art onto
-        prev, cd.ARTLINES = (cd.ARTLINES,
-            ['the quick brown fox'])
+        prev, cd.ARTLINES = cd.ARTLINES, ['the quick brown fox']
         self.assertTrue(worker.startup_info())

     @disable_stdouts
@@ -222,10 +221,10 @@ class test_Worker(WorkerAppCase):
         app = current_app
         c = app.conf
         p, app.amqp.queues = app.amqp.queues, app.amqp.Queues({
-                'celery': {'exchange': 'celery',
-                           'routing_key': 'celery'},
-                'video': {'exchange': 'video',
-                           'routing_key': 'video'}})
+            'celery': {'exchange': 'celery',
+                       'routing_key': 'celery'},
+            'video': {'exchange': 'video',
+                      'routing_key': 'video'}})
         try:
             worker = self.Worker()
             worker.setup_queues(['video'])
@@ -259,8 +258,10 @@ class test_Worker(WorkerAppCase):
         worker1 = self.Worker(include='some.module')
         self.assertListEqual(worker1.include, ['some.module'])
         worker2 = self.Worker(include='some.module,another.package')
-        self.assertListEqual(worker2.include,
-                ['some.module', 'another.package'])
+        self.assertListEqual(
+            worker2.include,
+            ['some.module', 'another.package'],
+        )
         self.Worker(include=['os', 'sys'])

     @disable_stdouts
@@ -280,7 +281,8 @@ class test_Worker(WorkerAppCase):

         prev, os.getuid = os.getuid, getuid
         try:
-            with self.assertWarnsRegex(RuntimeWarning,
+            with self.assertWarnsRegex(
+                    RuntimeWarning,
                     r'superuser privileges is discouraged'):
                 worker = self.Worker()
                 worker.on_start()
@@ -535,7 +537,7 @@ class test_signal_handlers(WorkerAppCase):
                 c.return_value = 3
                 worker = self._Worker()
                 handlers = self.psig(
-                        cd.install_worker_term_hard_handler, worker)
+                    cd.install_worker_term_hard_handler, worker)
                 try:
                     handlers['SIGQUIT']('SIGQUIT', object())
                     self.assertTrue(state.should_terminate)
@@ -545,7 +547,7 @@ class test_signal_handlers(WorkerAppCase):
                 c.return_value = 1
                 worker = self._Worker()
                 handlers = self.psig(
-                        cd.install_worker_term_hard_handler, worker)
+                    cd.install_worker_term_hard_handler, worker)
                 with self.assertRaises(SystemTerminate):
                     handlers['SIGQUIT']('SIGQUIT', object())
         finally:

+ 5 - 4
celery/tests/concurrency/test_concurrency.py

@@ -29,8 +29,9 @@ class test_BasePool(Case):
                      accept_callback=gen_callback('accept_callback'))

         self.assertDictContainsSubset(
-            {'target': (1, (8, 16)),
-             'callback': (2, (42, ))}, scratch)
+            {'target': (1, (8, 16)), 'callback': (2, (42, ))},
+            scratch,
+        )
         pa1 = scratch['accept_callback']
         self.assertEqual(0, pa1[0])
         self.assertEqual(pa1[1][0], os.getpid())
@@ -43,8 +44,8 @@ class test_BasePool(Case):
                      callback=gen_callback('callback'),
                      accept_callback=None)
         self.assertDictEqual(scratch,
-                              {'target': (3, (8, 16)),
-                               'callback': (4, (42, ))})
+                             {'target': (3, (8, 16)),
+                              'callback': (4, (42, ))})

     def test_does_not_debug(self):
         x = BasePool(10)

+ 2 - 2
celery/tests/concurrency/test_gevent.py

@@ -59,8 +59,8 @@ class test_Schedule(Case):
     def test_sched(self):
         with mock_module(*gevent_modules):
             with patch_many('gevent.greenlet',
-                    'gevent.greenlet.GreenletExit') as (greenlet,
-                                                        GreenletExit):
+                            'gevent.greenlet.GreenletExit') as (greenlet,
+                                                                GreenletExit):
                 greenlet.Greenlet = object
                 x = Schedule()
                 greenlet.Greenlet = Mock()

+ 4 - 4
celery/tests/concurrency/test_pool.py

@@ -59,7 +59,7 @@ class test_TaskPool(Case):
         self.assertEqual(res.get(), 100)
         time.sleep(0.5)
         self.assertDictContainsSubset({'ret_value': 100},
-                                       scratchpad.get(0))
+                                      scratchpad.get(0))

         self.assertIsInstance(res2.get(), ExceptionInfo)
         self.assertTrue(scratchpad.get(1))
@@ -67,17 +67,17 @@ class test_TaskPool(Case):
         self.assertIsInstance(scratchpad[1]['ret_value'],
                               ExceptionInfo)
         self.assertEqual(scratchpad[1]['ret_value'].exception.args,
-                          ('FOO EXCEPTION', ))
+                         ('FOO EXCEPTION', ))

         self.assertEqual(res3.get(), 400)
         time.sleep(0.5)
         self.assertDictContainsSubset({'ret_value': 400},
-                                       scratchpad.get(2))
+                                      scratchpad.get(2))

         res3 = p.apply_async(do_something, args=[30], callback=mycallback)

         self.assertEqual(res3.get(), 900)
         time.sleep(0.5)
         self.assertDictContainsSubset({'ret_value': 900},
-                                       scratchpad.get(3))
+                                      scratchpad.get(3))
         p.stop()

+ 6 - 3
celery/tests/concurrency/test_threads.py

@@ -52,8 +52,11 @@ class test_TaskPool(Case):
             accept_callback = Mock()
             target = Mock()
             req = x.on_apply(target, args=(1, 2), kwargs={'a': 10},
-                callback=callback, accept_callback=accept_callback)
-            x.WorkRequest.assert_called_with(apply_target, (
-                target, (1, 2), {'a': 10}, callback, accept_callback))
+                             callback=callback,
+                             accept_callback=accept_callback)
+            x.WorkRequest.assert_called_with(
+                apply_target,
+                (target, (1, 2), {'a': 10}, callback, accept_callback),
+            )
             x._pool.putRequest.assert_called_with(req)
             x._pool._results_queue.queue.clear.assert_called_with()

+ 2 - 2
celery/tests/config.py

@@ -34,8 +34,8 @@ CELERY_MONGODB_BACKEND_SETTINGS = {
     'host': os.environ.get('MONGO_HOST') or 'localhost',
     'port': os.environ.get('MONGO_PORT') or 27017,
     'database': os.environ.get('MONGO_DB') or 'celery_unittests',
-    'taskmeta_collection': os.environ.get('MONGO_TASKMETA_COLLECTION') or
-        'taskmeta_collection',
+    'taskmeta_collection': (os.environ.get('MONGO_TASKMETA_COLLECTION')
+                            or 'taskmeta_collection'),
 }
 if os.environ.get('MONGO_USER'):
     CELERY_MONGODB_BACKEND_SETTINGS['user'] = os.environ.get('MONGO_USER')

+ 17 - 7
celery/tests/contrib/test_migrate.py

@@ -14,13 +14,23 @@ from celery.tests.utils import AppCase, Case, Mock


 def Message(body, exchange='exchange', routing_key='rkey',
-        compression=None, content_type='application/json',
-        content_encoding='utf-8'):
-    return Mock(attrs=dict(body=body,
-        delivery_info=dict(exchange=exchange, routing_key=routing_key),
-        headers=dict(compression=compression),
-        content_type=content_type, content_encoding=content_encoding,
-        properties={}))
+            compression=None, content_type='application/json',
+            content_encoding='utf-8'):
+    return Mock(
+        attrs={
+            'body': body,
+            'delivery_info': {
+                'exchange': exchange,
+                'routing_key': routing_key,
+            },
+            'headers': {
+                'compression': compression,
+            },
+            'content_type': content_type,
+            'content_encoding': content_encoding,
+            'properties': {}
+        },
+    )


 class test_State(Case):
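
Beyond re-indentation, the Message hunk above also replaces nested dict(...) calls with dict literals. Both spellings build equal mappings; the literal keeps the string keys explicit and also allows keys that are not valid identifiers. A quick standalone check (illustrative values, not repository code):

    # dict() with keyword arguments and a dict literal compare equal.
    as_call = dict(exchange='exchange', routing_key='rkey')
    as_literal = {'exchange': 'exchange', 'routing_key': 'rkey'}
    assert as_call == as_literal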

+ 2 - 2
celery/tests/events/test_events.py

@@ -96,11 +96,11 @@ class test_EventDispatcher(AppCase):
                                                     enabled=True)
             dispatcher2 = self.app.events.Dispatcher(connection,
                                                      enabled=True,
-                                                      channel=channel)
+                                                     channel=channel)
             self.assertTrue(dispatcher.enabled)
             self.assertTrue(dispatcher.publisher.channel)
             self.assertEqual(dispatcher.publisher.serializer,
-                            self.app.conf.CELERY_EVENT_SERIALIZER)
+                             self.app.conf.CELERY_EVENT_SERIALIZER)

             created_channel = dispatcher.publisher.channel
             dispatcher.disable()

+ 10 - 10
celery/tests/events/test_state.py

@@ -59,7 +59,7 @@ class ev_worker_heartbeats(replay):
     def setup(self):
         self.events = [
             Event('worker-heartbeat', hostname='utest1',
-                timestamp=time() - HEARTBEAT_EXPIRE_WINDOW * 2),
+                  timestamp=time() - HEARTBEAT_EXPIRE_WINDOW * 2),
             Event('worker-heartbeat', hostname='utest1'),
         ]

@@ -70,16 +70,16 @@ class ev_task_states(replay):
         tid = self.tid = uuid()
         self.events = [
             Event('task-received', uuid=tid, name='task1',
-                args='(2, 2)', kwargs="{'foo': 'bar'}",
-                retries=0, eta=None, hostname='utest1'),
+                  args='(2, 2)', kwargs="{'foo': 'bar'}",
+                  retries=0, eta=None, hostname='utest1'),
             Event('task-started', uuid=tid, hostname='utest1'),
             Event('task-revoked', uuid=tid, hostname='utest1'),
             Event('task-retried', uuid=tid, exception="KeyError('bar')",
-                traceback='line 2 at main', hostname='utest1'),
+                  traceback='line 2 at main', hostname='utest1'),
             Event('task-failed', uuid=tid, exception="KeyError('foo')",
-                traceback='line 1 at main', hostname='utest1'),
+                  traceback='line 1 at main', hostname='utest1'),
             Event('task-succeeded', uuid=tid, result='4',
-                runtime=0.1234, hostname='utest1'),
+                  runtime=0.1234, hostname='utest1'),
         ]


@@ -95,7 +95,7 @@ class ev_snapshot(replay):
             worker = not i % 2 and 'utest2' or 'utest1'
             type = not i % 2 and 'task2' or 'task1'
             self.events.append(Event('task-received', name=type,
-                          uuid=uuid(), hostname=worker))
+                               uuid=uuid(), hostname=worker))


 class test_Worker(Case):
@@ -128,10 +128,10 @@ class test_Task(Case):
                     routing_key='celery',
                     succeeded=time())
         self.assertEqual(sorted(list(task._info_fields)),
-                              sorted(task.info().keys()))
+                         sorted(task.info().keys()))

         self.assertEqual(sorted(list(task._info_fields + ('received', ))),
-                              sorted(task.info(extra=('received', ))))
+                         sorted(task.info(extra=('received', ))))

         self.assertEqual(sorted(['args', 'kwargs']),
                          sorted(task.info(['args', 'kwargs']).keys()))
@@ -208,7 +208,7 @@ class test_State(Case):
         # STARTED
         next(r)
         self.assertTrue(r.state.workers['utest1'].alive,
-                'any task event adds worker heartbeat')
+                        'any task event adds worker heartbeat')
         self.assertEqual(task.state, states.STARTED)
         self.assertTrue(task.started)
         self.assertEqual(task.timestamp, task.started)

+ 16 - 12
celery/tests/functional/case.py

@@ -64,16 +64,20 @@ class Worker(object):
         return self.hostname in flatten_reply(r)

     def wait_until_started(self, timeout=10, interval=0.5):
-        try_while(lambda: self.is_alive(interval),
-                "Worker won't start (after %s secs.)" % timeout,
-                interval=interval, timeout=timeout)
+        try_while(
+            lambda: self.is_alive(interval),
+            "Worker won't start (after %s secs.)" % timeout,
+            interval=interval, timeout=timeout,
+        )
         say('--WORKER %s IS ONLINE--' % self.hostname)

     def ensure_shutdown(self, timeout=10, interval=0.5):
         os.kill(self.pid, signal.SIGTERM)
-        try_while(lambda: not self.is_alive(interval),
-                  "Worker won't shutdown (after %s secs.)" % timeout,
-                  timeout=10, interval=0.5)
+        try_while(
+            lambda: not self.is_alive(interval),
+            "Worker won't shutdown (after %s secs.)" % timeout,
+            timeout=10, interval=0.5,
+        )
         say('--WORKER %s IS SHUTDOWN--' % self.hostname)
         self._shutdown_called = True

@@ -96,8 +100,8 @@ class Worker(object):
         def _ensure_shutdown_once():
             if not worker._shutdown_called:
                 say('-- Found worker not stopped at shutdown: %s\n%s' % (
-                        worker.hostname,
-                        '\n'.join(stack)))
+                    worker.hostname,
+                    '\n'.join(stack)))
                 worker.ensure_shutdown()

         return worker
@@ -161,10 +165,10 @@ class WorkerCase(Case):

     def ensure_received(self, task_id, interval=0.5, timeout=10):
         return try_while(lambda: self.is_received(task_id, interval),
-                        'Task not receied within timeout',
-                        interval=0.5, timeout=10)
+                         'Task not receied within timeout',
+                         interval=0.5, timeout=10)

     def ensure_scheduled(self, task_id, interval=0.5, timeout=10):
         return try_while(lambda: self.is_scheduled(task_id, interval),
-                        'Task not scheduled within timeout',
-                        interval=0.5, timeout=10)
+                         'Task not scheduled within timeout',
+                         interval=0.5, timeout=10)

+ 1 - 1
celery/tests/security/test_security.py

@@ -39,7 +39,7 @@ class test_security(SecurityCase):
         self.assertEqual(0, len(disabled))

         disable_untrusted_serializers(
-                ['application/json', 'application/x-python-serialize'])
+            ['application/json', 'application/x-python-serialize'])
         self.assertIn('application/x-yaml', disabled)
         self.assertNotIn('application/json', disabled)
         self.assertNotIn('application/x-python-serialize', disabled)

Some files were not shown because too many files changed in this diff