
Fixes flakes E126,E127,E128

Ask Solem · 12 years ago · commit 6c004c93e2
100 changed files with 1071 additions and 874 deletions
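
E126, E127 and E128 are pycodestyle (flake8) continuation-line warnings: E126 "continuation line over-indented for hanging indent", E127 "continuation line over-indented for visual indent", and E128 "continuation line under-indented for visual indent". The hunks below clear them in one of two ways: continuation lines are re-aligned with the token after the opening bracket (visual indent), or the call is rewritten with a uniform four-space hanging indent and the closing bracket on its own line. A minimal before/after sketch (illustrative only, not taken from the commit; connect is a placeholder function):

    def connect(hostname, userid, password):
        # placeholder standing in for any multi-argument call
        return (hostname, userid, password)

    # E128: continuation line under-indented for visual indent
    conn = connect('localhost', 'guest',
        'guest')

    # Fix A: visual indent -- align the continuation with the first argument
    conn = connect('localhost', 'guest',
                   'guest')

    # Fix B: hanging indent -- one level in, closing bracket on its own line;
    # over-indenting these continuation lines is what triggers E126/E127
    conn = connect(
        'localhost', 'guest', 'guest',
    )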
  1. celery/__compat__.py (+5 -5)
  2. celery/__init__.py (+2 -1)
  3. celery/_state.py (+4 -2)
  4. celery/app/__init__.py (+7 -6)
  5. celery/app/abstract.py (+3 -3)
  6. celery/app/amqp.py (+55 -45)
  7. celery/app/base.py (+59 -51)
  8. celery/app/builtins.py (+20 -16)
  9. celery/app/control.py (+32 -26)
  10. celery/app/defaults.py (+15 -12)
  11. celery/app/log.py (+34 -24)
  12. celery/app/registry.py (+1 -1)
  13. celery/app/routes.py (+2 -2)
  14. celery/app/task.py (+14 -12)
  15. celery/app/utils.py (+4 -3)
  16. celery/apps/beat.py (+5 -4)
  17. celery/apps/worker.py (+21 -17)
  18. celery/backends/__init__.py (+1 -1)
  19. celery/backends/amqp.py (+25 -22)
  20. celery/backends/base.py (+24 -23)
  21. celery/backends/cassandra.py (+8 -7)
  22. celery/backends/database/__init__.py (+20 -16)
  23. celery/backends/database/dfd042c7.py (+2 -2)
  24. celery/backends/database/models.py (+3 -3)
  25. celery/backends/mongodb.py (+3 -3)
  26. celery/backends/redis.py (+1 -1)
  27. celery/beat.py (+29 -23)
  28. celery/bin/base.py (+5 -5)
  29. celery/bin/camqadm.py (+5 -5)
  30. celery/bin/celery.py (+69 -67)
  31. celery/bin/celeryd.py (+9 -9)
  32. celery/bin/celeryd_detach.py (+6 -5)
  33. celery/bin/celeryd_multi.py (+12 -10)
  34. celery/bin/celeryev.py (+2 -2)
  35. celery/canvas.py (+39 -25)
  36. celery/concurrency/base.py (+5 -5)
  37. celery/concurrency/eventlet.py (+4 -3)
  38. celery/concurrency/gevent.py (+2 -1)
  39. celery/concurrency/threads.py (+2 -2)
  40. celery/contrib/batches.py (+7 -5)
  41. celery/contrib/bundles.py (+32 -16)
  42. celery/contrib/migrate.py (+13 -13)
  43. celery/contrib/rdb.py (+5 -4)
  44. celery/datastructures.py (+2 -2)
  45. celery/events/__init__.py (+5 -5)
  46. celery/events/cursesmon.py (+43 -32)
  47. celery/events/dumper.py (+15 -16)
  48. celery/events/snapshot.py (+2 -2)
  49. celery/events/state.py (+12 -11)
  50. celery/loaders/__init__.py (+1 -1)
  51. celery/loaders/base.py (+24 -22)
  52. celery/loaders/default.py (+1 -1)
  53. celery/local.py (+2 -2)
  54. celery/platforms.py (+2 -2)
  55. celery/result.py (+5 -5)
  56. celery/schedules.py (+37 -35)
  57. celery/security/__init__.py (+1 -1)
  58. celery/security/serialization.py (+7 -7)
  59. celery/task/__init__.py (+2 -1)
  60. celery/task/base.py (+14 -12)
  61. celery/task/http.py (+2 -2)
  62. celery/task/sets.py (+2 -2)
  63. celery/task/trace.py (+2 -2)
  64. celery/tests/__init__.py (+2 -2)
  65. celery/tests/app/test_amqp.py (+2 -2)
  66. celery/tests/app/test_app.py (+17 -13)
  67. celery/tests/app/test_beat.py (+1 -1)
  68. celery/tests/app/test_builtins.py (+3 -2)
  69. celery/tests/app/test_celery.py (+1 -1)
  70. celery/tests/app/test_control.py (+4 -3)
  71. celery/tests/app/test_loaders.py (+16 -13)
  72. celery/tests/app/test_log.py (+7 -5)
  73. celery/tests/app/test_routes.py (+11 -8)
  74. celery/tests/backends/test_amqp.py (+6 -6)
  75. celery/tests/backends/test_base.py (+2 -2)
  76. celery/tests/backends/test_cache.py (+2 -2)
  77. celery/tests/backends/test_cassandra.py (+9 -8)
  78. celery/tests/bin/test_base.py (+3 -2)
  79. celery/tests/bin/test_camqadm.py (+8 -4)
  80. celery/tests/bin/test_celery.py (+9 -5)
  81. celery/tests/bin/test_celerybeat.py (+2 -2)
  82. celery/tests/bin/test_celeryd.py (+13 -11)
  83. celery/tests/bin/test_celeryd_detach.py (+5 -4)
  84. celery/tests/bin/test_celeryd_multi.py (+57 -37)
  85. celery/tests/compat.py (+3 -3)
  86. celery/tests/concurrency/test_concurrency.py (+2 -2)
  87. celery/tests/concurrency/test_gevent.py (+2 -2)
  88. celery/tests/concurrency/test_pool.py (+4 -4)
  89. celery/tests/concurrency/test_threads.py (+6 -3)
  90. celery/tests/config.py (+2 -2)
  91. celery/tests/contrib/test_migrate.py (+17 -7)
  92. celery/tests/events/test_events.py (+2 -2)
  93. celery/tests/events/test_state.py (+10 -10)
  94. celery/tests/functional/case.py (+16 -12)
  95. celery/tests/security/test_security.py (+1 -1)
  96. celery/tests/slow/test_buckets.py (+3 -3)
  97. celery/tests/tasks/test_canvas.py (+12 -6)
  98. celery/tests/tasks/test_chord.py (+1 -1)
  99. celery/tests/tasks/test_http.py (+10 -8)
  100. celery/tests/tasks/test_registry.py (+3 -4)

+ 5 - 5
celery/__compat__.py

@@ -146,20 +146,20 @@ class MagicModule(ModuleType):
         return list(set(self.__all__) | DEFAULT_ATTRS)
 
 
-def create_module(name, attrs, cls_attrs=None, pkg=None, base=MagicModule,
-        prepare_attr=None):
+def create_module(name, attrs, cls_attrs=None, pkg=None,
+                  base=MagicModule, prepare_attr=None):
     fqdn = '.'.join([pkg.__name__, name]) if pkg else name
     cls_attrs = {} if cls_attrs is None else cls_attrs
 
     attrs = dict((attr_name, prepare_attr(attr) if prepare_attr else attr)
-                    for attr_name, attr in attrs.iteritems())
+                 for attr_name, attr in attrs.iteritems())
     module = sys.modules[fqdn] = type(name, (base, ), cls_attrs)(fqdn)
     module.__dict__.update(attrs)
     return module
 
 
 def recreate_module(name, compat_modules=(), by_module={}, direct={},
-        base=MagicModule, **attrs):
+                    base=MagicModule, **attrs):
     old_module = sys.modules[name]
     origins = get_origins(by_module)
     compat_modules = COMPAT_MODULES.get(name, ())
@@ -171,7 +171,7 @@ def recreate_module(name, compat_modules=(), by_module={}, direct={},
                                 compat_modules, origins, direct, attrs])))))
     new_module = create_module(name, attrs, cls_attrs=cattrs, base=base)
     new_module.__dict__.update(dict((mod, get_compat_module(new_module, mod))
-                                     for mod in compat_modules))
+                               for mod in compat_modules))
     return old_module, new_module

+ 2 - 1
celery/__init__.py

@@ -42,7 +42,8 @@ if STATICA_HACK:
 # Lazy loading
 from .__compat__ import recreate_module
 
-old_module, new_module = recreate_module(__name__,  # pragma: no cover
+old_module, new_module = recreate_module(  # pragma: no cover
+    __name__,
     by_module={
         'celery.app': ['Celery', 'bugreport', 'shared_task'],
         'celery.app.task': ['Task'],

+ 4 - 2
celery/_state.py

@@ -44,9 +44,11 @@ def get_current_app():
     if default_app is None:
         #: creates the global fallback app instance.
         from celery.app import Celery
-        set_default_app(Celery('default',
+        set_default_app(Celery(
+            'default',
             loader=os.environ.get('CELERY_LOADER') or 'default',
-            set_as_current=False, accept_magic_kwargs=True))
+            set_as_current=False, accept_magic_kwargs=True,
+        ))
     return _tls.current_app or default_app

+ 7 - 6
celery/app/__init__.py

@@ -14,10 +14,10 @@ import os
 from celery.local import Proxy
 from celery import _state
 from celery._state import (  # noqa
-        set_default_app,
-        get_current_app as current_app,
-        get_current_task as current_task,
-        _get_active_apps,
+    set_default_app,
+    get_current_app as current_app,
+    get_current_task as current_task,
+    _get_active_apps,
 )
 from celery.utils import gen_task_name
 
@@ -125,8 +125,9 @@ def shared_task(*args, **kwargs):
             # apps task registry.
             def task_by_cons():
                 app = current_app()
-                return app.tasks[name or gen_task_name(app,
-                            fun.__name__, fun.__module__)]
+                return app.tasks[
+                    name or gen_task_name(app, fun.__name__, fun.__module__)
+                ]
             return Proxy(task_by_cons)
         return __inner

+ 3 - 3
celery/app/abstract.py

@@ -23,8 +23,8 @@ class _configurated(type):
 
     def __new__(cls, name, bases, attrs):
         attrs['__confopts__'] = dict((attr, spec.get_key(attr))
-                                          for attr, spec in attrs.iteritems()
-                                              if isinstance(spec, from_config))
+                                     for attr, spec in attrs.iteritems()
+                                     if isinstance(spec, from_config))
         inherit_from = attrs.get('inherit_confopts', ())
         for subcls in bases:
             try:
@@ -34,7 +34,7 @@ class _configurated(type):
         for subcls in inherit_from:
             attrs['__confopts__'].update(subcls.__confopts__)
         attrs = dict((k, v if not isinstance(v, from_config) else None)
-                        for k, v in attrs.iteritems())
+                     for k, v in attrs.iteritems())
         return super(_configurated, cls).__new__(cls, name, bases, attrs)

+ 55 - 45
celery/app/amqp.py

@@ -46,7 +46,7 @@ class Queues(dict):
     _consume_from = None
 
     def __init__(self, queues=None, default_exchange=None,
-            create_missing=True, ha_policy=None):
+                 create_missing=True, ha_policy=None):
         dict.__init__(self)
         self.aliases = WeakValueDictionary()
         self.default_exchange = default_exchange
@@ -117,12 +117,13 @@ class Queues(dict):
         active = self.consume_from
         if not active:
             return ''
-        info = [QUEUE_FORMAT.strip() % {
-                    'name': (name + ':').ljust(12),
-                    'exchange': q.exchange.name,
-                    'exchange_type': q.exchange.type,
-                    'routing_key': q.routing_key}
-                        for name, q in sorted(active.iteritems())]
+        info = [
+            QUEUE_FORMAT.strip() % {
+                'name': (name + ':').ljust(12),
+                'exchange': q.exchange.name,
+                'exchange_type': q.exchange.type,
+                'routing_key': q.routing_key}
+            for name, q in sorted(active.iteritems())]
         if indent_first:
             return textindent('\n'.join(info), indent)
         return info[0] + '\n' + textindent('\n'.join(info[1:]), indent)
@@ -169,20 +170,21 @@ class TaskProducer(Producer):
     def __init__(self, channel=None, exchange=None, *args, **kwargs):
         self.retry = kwargs.pop('retry', self.retry)
         self.retry_policy = kwargs.pop('retry_policy',
-                                        self.retry_policy or {})
+                                       self.retry_policy or {})
         exchange = exchange or self.exchange
         self.queues = self.app.amqp.queues  # shortcut
         self.default_queue = self.app.amqp.default_queue
         super(TaskProducer, self).__init__(channel, exchange, *args, **kwargs)
 
     def publish_task(self, task_name, task_args=None, task_kwargs=None,
-            countdown=None, eta=None, task_id=None, group_id=None,
-            taskset_id=None,  # compat alias to group_id
-            expires=None, exchange=None, exchange_type=None,
-            event_dispatcher=None, retry=None, retry_policy=None,
-            queue=None, now=None, retries=0, chord=None, callbacks=None,
-            errbacks=None, routing_key=None, serializer=None,
-            delivery_mode=None, compression=None, declare=None, **kwargs):
+                     countdown=None, eta=None, task_id=None, group_id=None,
+                     taskset_id=None,  # compat alias to group_id
+                     expires=None, exchange=None, exchange_type=None,
+                     event_dispatcher=None, retry=None, retry_policy=None,
+                     queue=None, now=None, retries=0, chord=None,
+                     callbacks=None, errbacks=None, routing_key=None,
+                     serializer=None, delivery_mode=None, compression=None,
+                     declare=None, **kwargs):
         """Send task message."""
 
         qname = queue
@@ -200,7 +202,7 @@ class TaskProducer(Producer):
         # merge default and custom policy
         retry = self.retry if retry is None else retry
         _rp = (dict(self.retry_policy, **retry_policy) if retry_policy
-                                                       else self.retry_policy)
+               else self.retry_policy)
         task_id = task_id or uuid()
         task_args = task_args or []
         task_kwargs = task_kwargs or {}
@@ -232,29 +234,33 @@ class TaskProducer(Producer):
             'chord': chord,
         }
 
-        self.publish(body,
-             exchange=exchange, routing_key=routing_key,
-             serializer=serializer or self.serializer,
-             compression=compression or self.compression,
-             retry=retry, retry_policy=_rp,
-             delivery_mode=delivery_mode, declare=declare,
-             **kwargs)
+        self.publish(
+            body,
+            exchange=exchange, routing_key=routing_key,
+            serializer=serializer or self.serializer,
+            compression=compression or self.compression,
+            retry=retry, retry_policy=_rp,
+            delivery_mode=delivery_mode, declare=declare,
+            **kwargs
+        )
 
         signals.task_sent.send(sender=task_name, **body)
         if event_dispatcher:
             exname = exchange or self.exchange
             if isinstance(exname, Exchange):
                 exname = exname.name
-            event_dispatcher.send('task-sent', uuid=task_id,
-                                               name=task_name,
-                                               args=safe_repr(task_args),
-                                               kwargs=safe_repr(task_kwargs),
-                                               retries=retries,
-                                               eta=eta,
-                                               expires=expires,
-                                               queue=qname,
-                                               exchange=exname,
-                                               routing_key=routing_key)
+            event_dispatcher.send(
+                'task-sent', uuid=task_id,
+                name=task_name,
+                args=safe_repr(task_args),
+                kwargs=safe_repr(task_kwargs),
+                retries=retries,
+                eta=eta,
+                expires=expires,
+                queue=qname,
+                exchange=exname,
+                routing_key=routing_key,
+            )
         return task_id
     delay_task = publish_task   # XXX Compat
 
@@ -266,7 +272,7 @@ class TaskPublisher(TaskProducer):
         self.app = app_or_default(kwargs.pop('app', self.app))
         self.retry = kwargs.pop('retry', self.retry)
         self.retry_policy = kwargs.pop('retry_policy',
-                                        self.retry_policy or {})
+                                       self.retry_policy or {})
         exchange = exchange or self.exchange
         if not isinstance(exchange, Exchange):
             exchange = Exchange(exchange,
@@ -280,8 +286,10 @@ class TaskConsumer(Consumer):
 
     def __init__(self, channel, queues=None, app=None, **kw):
         self.app = app or self.app
-        super(TaskConsumer, self).__init__(channel,
-                queues or self.app.amqp.queues.consume_from.values(), **kw)
+        super(TaskConsumer, self).__init__(
+            channel,
+            queues or self.app.amqp.queues.consume_from.values(), **kw
+        )
 
 
 class AMQP(object):
@@ -340,15 +348,17 @@ class AMQP(object):
 
         """
         conf = self.app.conf
-        return self.app.subclass_with_self(TaskProducer,
-                reverse='amqp.TaskProducer',
-                exchange=self.default_exchange,
-                routing_key=conf.CELERY_DEFAULT_ROUTING_KEY,
-                serializer=conf.CELERY_TASK_SERIALIZER,
-                compression=conf.CELERY_MESSAGE_COMPRESSION,
-                retry=conf.CELERY_TASK_PUBLISH_RETRY,
-                retry_policy=conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
-                utc=conf.CELERY_ENABLE_UTC)
+        return self.app.subclass_with_self(
+            TaskProducer,
+            reverse='amqp.TaskProducer',
+            exchange=self.default_exchange,
+            routing_key=conf.CELERY_DEFAULT_ROUTING_KEY,
+            serializer=conf.CELERY_TASK_SERIALIZER,
+            compression=conf.CELERY_MESSAGE_COMPRESSION,
+            retry=conf.CELERY_TASK_PUBLISH_RETRY,
+            retry_policy=conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
+            utc=conf.CELERY_ENABLE_UTC,
+        )
    TaskPublisher = TaskProducer  # compat

    @cached_property

+ 59 - 51
celery/app/base.py

@@ -61,9 +61,9 @@ class Celery(object):
     _pool = None
 
     def __init__(self, main=None, loader=None, backend=None,
-            amqp=None, events=None, log=None, control=None,
-            set_as_current=True, accept_magic_kwargs=False,
-            tasks=None, broker=None, include=None, **kwargs):
+                 amqp=None, events=None, log=None, control=None,
+                 set_as_current=True, accept_magic_kwargs=False,
+                 tasks=None, broker=None, include=None, **kwargs):
         self.clock = LamportClock()
         self.main = main
         self.amqp_cls = amqp or self.amqp_cls
@@ -116,12 +116,14 @@ class Celery(object):
         pass
 
     def start(self, argv=None):
-        return instantiate('celery.bin.celery:CeleryCommand', app=self) \
-                    .execute_from_commandline(argv)
+        return instantiate(
+            'celery.bin.celery:CeleryCommand',
+            app=self).execute_from_commandline(argv)
 
     def worker_main(self, argv=None):
-        return instantiate('celery.bin.celeryd:WorkerCommand', app=self) \
-                    .execute_from_commandline(argv)
+        return instantiate(
+            'celery.bin.celeryd:WorkerCommand',
+            app=self).execute_from_commandline(argv)
 
     def task(self, *args, **opts):
         """Creates new task class from any callable."""
@@ -165,11 +167,11 @@ class Celery(object):
         base = options.pop('base', None) or self.Task
 
         T = type(fun.__name__, (base, ), dict({
-                'app': self,
-                'accept_magic_kwargs': False,
-                'run': staticmethod(fun),
-                '__doc__': fun.__doc__,
-                '__module__': fun.__module__}, **options))()
+            'app': self,
+            'accept_magic_kwargs': False,
+            'run': staticmethod(fun),
+            '__doc__': fun.__doc__,
+            '__module__': fun.__module__}, **options))()
         task = self._tasks[T.name]  # return global instance.
         task.bind(self)
         return task
@@ -206,9 +208,9 @@ class Celery(object):
         self.conf.update(self.loader.cmdline_config_parser(argv, namespace))
 
     def send_task(self, name, args=None, kwargs=None, countdown=None,
-            eta=None, task_id=None, producer=None, connection=None,
-            result_cls=None, expires=None, queues=None, publisher=None,
-            **options):
+                  eta=None, task_id=None, producer=None, connection=None,
+                  result_cls=None, expires=None, queues=None, publisher=None,
+                  **options):
         producer = producer or publisher  # XXX compat
         if self.conf.CELERY_ALWAYS_EAGER:  # pragma: no cover
             warnings.warn(AlwaysEagerIgnored(
@@ -220,35 +222,37 @@ class Celery(object):
                            self.conf.CELERY_MESSAGE_COMPRESSION)
         options = router.route(options, name, args, kwargs)
         with self.producer_or_acquire(producer) as producer:
-            return result_cls(producer.publish_task(name, args, kwargs,
-                        task_id=task_id,
-                        countdown=countdown, eta=eta,
-                        expires=expires, **options))
+            return result_cls(producer.publish_task(
+                name, args, kwargs,
+                task_id=task_id,
+                countdown=countdown, eta=eta,
+                expires=expires, **options
+            ))
 
     def connection(self, hostname=None, userid=None,
-            password=None, virtual_host=None, port=None, ssl=None,
-            insist=None, connect_timeout=None, transport=None,
-            transport_options=None, heartbeat=None, **kwargs):
+                   password=None, virtual_host=None, port=None, ssl=None,
+                   insist=None, connect_timeout=None, transport=None,
+                   transport_options=None, heartbeat=None, **kwargs):
         conf = self.conf
         return self.amqp.Connection(
-                    hostname or conf.BROKER_HOST,
-                    userid or conf.BROKER_USER,
-                    password or conf.BROKER_PASSWORD,
-                    virtual_host or conf.BROKER_VHOST,
-                    port or conf.BROKER_PORT,
-                    transport=transport or conf.BROKER_TRANSPORT,
-                    insist=self.either('BROKER_INSIST', insist),
-                    ssl=self.either('BROKER_USE_SSL', ssl),
-                    connect_timeout=self.either(
-                        'BROKER_CONNECTION_TIMEOUT', connect_timeout),
-                    heartbeat=heartbeat,
-                    transport_options=dict(conf.BROKER_TRANSPORT_OPTIONS,
-                                           **transport_options or {}))
+            hostname or conf.BROKER_HOST,
+            userid or conf.BROKER_USER,
+            password or conf.BROKER_PASSWORD,
+            virtual_host or conf.BROKER_VHOST,
+            port or conf.BROKER_PORT,
+            transport=transport or conf.BROKER_TRANSPORT,
+            insist=self.either('BROKER_INSIST', insist),
+            ssl=self.either('BROKER_USE_SSL', ssl),
+            connect_timeout=self.either(
+                'BROKER_CONNECTION_TIMEOUT', connect_timeout),
+            heartbeat=heartbeat,
+            transport_options=dict(conf.BROKER_TRANSPORT_OPTIONS,
+                                   **transport_options or {}))
     broker_connection = connection
 
     @contextmanager
     def connection_or_acquire(self, connection=None, pool=True,
-            *args, **kwargs):
+                              *args, **kwargs):
         if connection:
             yield connection
         else:
@@ -299,15 +303,17 @@ class Celery(object):
     def mail_admins(self, subject, body, fail_silently=False):
         if self.conf.ADMINS:
             to = [admin_email for _, admin_email in self.conf.ADMINS]
-            return self.loader.mail_admins(subject, body, fail_silently, to=to,
-                                       sender=self.conf.SERVER_EMAIL,
-                                       host=self.conf.EMAIL_HOST,
-                                       port=self.conf.EMAIL_PORT,
-                                       user=self.conf.EMAIL_HOST_USER,
-                                       password=self.conf.EMAIL_HOST_PASSWORD,
-                                       timeout=self.conf.EMAIL_TIMEOUT,
-                                       use_ssl=self.conf.EMAIL_USE_SSL,
-                                       use_tls=self.conf.EMAIL_USE_TLS)
+            return self.loader.mail_admins(
+                subject, body, fail_silently, to=to,
+                sender=self.conf.SERVER_EMAIL,
+                host=self.conf.EMAIL_HOST,
+                port=self.conf.EMAIL_PORT,
+                user=self.conf.EMAIL_HOST_USER,
+                password=self.conf.EMAIL_HOST_PASSWORD,
+                timeout=self.conf.EMAIL_TIMEOUT,
+                use_ssl=self.conf.EMAIL_USE_SSL,
+                use_tls=self.conf.EMAIL_USE_TLS,
+            )
 
     def select_queues(self, queues=None):
         return self.amqp.queues.select_subset(queues)
@@ -323,14 +329,14 @@ class Celery(object):
     def _get_backend(self):
         from celery.backends import get_backend_by_url
         backend, url = get_backend_by_url(
-                self.backend_cls or self.conf.CELERY_RESULT_BACKEND,
-                self.loader)
+            self.backend_cls or self.conf.CELERY_RESULT_BACKEND,
+            self.loader)
         return backend(app=self, url=url)
 
     def _get_config(self):
         self.configured = True
         s = Settings({}, [self.prepare_config(self.loader.conf),
-                             deepcopy(DEFAULTS)])
+                          deepcopy(DEFAULTS)])
 
         # load lazy config dict initializers.
         pending = self._pending_defaults
@@ -360,7 +366,7 @@ class Celery(object):
                                        attribute='_app', abstract=True)
 
     def subclass_with_self(self, Class, name=None, attribute='app',
-            reverse=None, **kw):
+                           reverse=None, **kw):
         """Subclass an app-compatible class by setting its app attribute
         to be this app instance.
 
@@ -396,8 +402,10 @@ class Celery(object):
         # Reduce only pickles the configuration changes,
         # so the default configuration doesn't have to be passed
         # between processes.
-        return (_unpickle_app, (self.__class__, self.Pickler)
-                              + self.__reduce_args__())
+        return (
+            _unpickle_app,
+            (self.__class__, self.Pickler) + self.__reduce_args__(),
+        )
 
     def __reduce_args__(self):
         return (self.main, self.conf.changes, self.loader_cls,

+ 20 - 16
celery/app/builtins.py

@@ -73,7 +73,7 @@ def add_unlock_chord_task(app):
     @app.task(name='celery.chord_unlock', max_retries=None,
               default_retry_delay=1, ignore_result=True, _force_evaluate=True)
     def unlock_chord(group_id, callback, interval=None, propagate=False,
-            max_retries=None, result=None):
+                     max_retries=None, result=None):
         if interval is None:
             interval = unlock_chord.default_retry_delay
         result = _res.GroupResult(group_id, map(_res.AsyncResult, result))
@@ -134,10 +134,12 @@ def add_group_task(app):
             result = from_serializable(result)
             # any partial args are added to all tasks in the group
             taskit = (subtask(task).clone(partial_args)
-                        for i, task in enumerate(tasks))
+                      for i, task in enumerate(tasks))
             if self.request.is_eager or app.conf.CELERY_ALWAYS_EAGER:
-                return app.GroupResult(result.id,
-                        [task.apply(group_id=group_id) for task in taskit])
+                return app.GroupResult(
+                    result.id,
+                    [task.apply(group_id=group_id) for task in taskit],
+                )
             with app.producer_or_acquire() as pub:
                 [task.apply_async(group_id=group_id, publisher=pub,
                                   add_to_parent=False) for task in taskit]
@@ -148,8 +150,8 @@ def add_group_task(app):
 
         def prepare(self, options, tasks, args, **kwargs):
             AsyncResult = self.AsyncResult
-            options['group_id'] = group_id = \
-                    options.setdefault('task_id', uuid())
+            options['group_id'] = group_id = (
+                options.setdefault('task_id', uuid()))
 
             def prepare_member(task):
                 task = maybe_subtask(task)
@@ -171,16 +173,18 @@ def add_group_task(app):
         def apply_async(self, partial_args=(), kwargs={}, **options):
             if self.app.conf.CELERY_ALWAYS_EAGER:
                 return self.apply(partial_args, kwargs, **options)
-            tasks, result, gid, args = self.prepare(options,
-                                            args=partial_args, **kwargs)
-            super(Group, self).apply_async((list(tasks),
-                result.serializable(), gid, args), **options)
+            tasks, result, gid, args = self.prepare(
+                options, args=partial_args, **kwargs
+            )
+            super(Group, self).apply_async((
+                list(tasks), result.serializable(), gid, args), **options
+            )
             return result
 
         def apply(self, args=(), kwargs={}, **options):
             return super(Group, self).apply(
-                    self.prepare(options, args=args, **kwargs),
-                    **options).get()
+                self.prepare(options, args=args, **kwargs),
+                **options).get()
     return Group
 
 
@@ -229,7 +233,7 @@ def add_chain_task(app):
             return tasks, results
 
         def apply_async(self, args=(), kwargs={}, group_id=None, chord=None,
-                task_id=None, **options):
+                        task_id=None, **options):
             if self.app.conf.CELERY_ALWAYS_EAGER:
                 return self.apply(args, kwargs, **options)
             options.pop('publisher', None)
@@ -283,7 +287,7 @@ def add_chord_task(app):
                 return header.apply(args=partial_args, task_id=group_id)
 
             results = [AsyncResult(prepare_member(task, body, group_id))
-                            for task in header.tasks]
+                       for task in header.tasks]
 
             # - fallback implementations schedules the chord_unlock task here
             app.backend.on_chord_apply(group_id, body,
@@ -320,7 +324,7 @@ def add_chord_task(app):
                 body.set(chord=chord)
             callback_id = body.options.setdefault('task_id', task_id or uuid())
             parent = super(Chord, self).apply_async((header, body, args),
-                                                     kwargs, **options)
+                                                    kwargs, **options)
             body_result = self.AsyncResult(callback_id)
             body_result.parent = parent
             return body_result
@@ -330,5 +334,5 @@ def add_chord_task(app):
             res = super(Chord, self).apply(args, dict(kwargs, eager=True),
                                            **options)
             return maybe_subtask(body).apply(
-                        args=(res.get(propagate=propagate).get(), ))
+                args=(res.get(propagate=propagate).get(), ))
     return Chord

+ 32 - 26
celery/app/control.py

@@ -27,7 +27,7 @@ class Inspect(object):
     app = None
 
     def __init__(self, destination=None, timeout=1, callback=None,
-            connection=None, app=None, limit=None):
+                 connection=None, app=None, limit=None):
         self.app = app or self.app
         self.destination = destination
         self.timeout = timeout
@@ -45,13 +45,15 @@ class Inspect(object):
         return by_node
 
     def _request(self, command, **kwargs):
-        return self._prepare(self.app.control.broadcast(command,
-                                      arguments=kwargs,
-                                      destination=self.destination,
-                                      callback=self.callback,
-                                      connection=self.connection,
-                                      limit=self.limit,
-                                      timeout=self.timeout, reply=True))
+        return self._prepare(self.app.control.broadcast(
+            command,
+            arguments=kwargs,
+            destination=self.destination,
+            callback=self.callback,
+            connection=self.connection,
+            limit=self.limit,
+            timeout=self.timeout, reply=True,
+        ))
 
     def report(self):
         return self._request('report')
@@ -107,7 +109,7 @@ class Control(object):
     discard_all = purge
 
     def revoke(self, task_id, destination=None, terminate=False,
-            signal='SIGTERM', **kwargs):
+               signal='SIGTERM', **kwargs):
         """Tell all (or specific) workers to revoke a task by id.
 
         If a task is revoked, the workers will ignore the task and
@@ -157,7 +159,7 @@ class Control(object):
                               **kwargs)
 
     def add_consumer(self, queue, exchange=None, exchange_type='direct',
-            routing_key=None, options=None, **kwargs):
+                     routing_key=None, options=None, **kwargs):
         """Tell all (or specific) workers to start consuming from a new queue.
 
         Only the queue name is required as if only the queue is specified
@@ -180,11 +182,13 @@ class Control(object):
         See :meth:`broadcast` for supported keyword arguments.
 
         """
-        return self.broadcast('add_consumer',
-                arguments=dict({'queue': queue, 'exchange': exchange,
-                                'exchange_type': exchange_type,
-                                'routing_key': routing_key}, **options or {}),
-                **kwargs)
+        return self.broadcast(
+            'add_consumer',
+            arguments=dict({'queue': queue, 'exchange': exchange,
+                            'exchange_type': exchange_type,
+                            'routing_key': routing_key}, **options or {}),
+            **kwargs
+        )
 
     def cancel_consumer(self, queue, **kwargs):
         """Tell all (or specific) workers to stop consuming from ``queue``.
@@ -192,8 +196,9 @@ class Control(object):
         Supports the same keyword arguments as :meth:`broadcast`.
 
         """
-        return self.broadcast('cancel_consumer',
-                arguments={'queue': queue}, **kwargs)
+        return self.broadcast(
+            'cancel_consumer', arguments={'queue': queue}, **kwargs
+        )
 
     def time_limit(self, task_name, soft=None, hard=None, **kwargs):
         """Tell all (or specific) workers to set time limits for
@@ -206,9 +211,10 @@ class Control(object):
         Any additional keyword arguments are passed on to :meth:`broadcast`.
 
         """
-        return self.broadcast('time_limit',
-                              arguments={'task_name': task_name,
-                                         'hard': hard, 'soft': soft}, **kwargs)
+        return self.broadcast(
+            'time_limit',
+            arguments={'task_name': task_name,
+                       'hard': hard, 'soft': soft}, **kwargs)
 
     def enable_events(self, destination=None, **kwargs):
         """Tell all (or specific) workers to enable events."""
@@ -235,8 +241,8 @@ class Control(object):
         return self.broadcast('pool_shrink', {}, destination, **kwargs)
 
     def broadcast(self, command, arguments=None, destination=None,
-            connection=None, reply=False, timeout=1, limit=None,
-            callback=None, channel=None, **extra_kwargs):
+                  connection=None, reply=False, timeout=1, limit=None,
+                  callback=None, channel=None, **extra_kwargs):
         """Broadcast a control command to the celery workers.
 
         :param command: Name of command to send.
@@ -254,7 +260,7 @@ class Control(object):
         """
         with self.app.connection_or_acquire(connection) as conn:
             arguments = dict(arguments or {}, **extra_kwargs)
-            return self.mailbox(conn)._broadcast(command, arguments,
-                                                 destination, reply, timeout,
-                                                 limit, callback,
-                                                 channel=channel)
+            return self.mailbox(conn)._broadcast(
+                command, arguments, destination, reply, timeout,
+                limit, callback, channel=channel,
+            )

+ 15 - 12
celery/app/defaults.py

@@ -91,11 +91,13 @@ NAMESPACES = {
     'CELERY': {
         'ACKS_LATE': Option(False, type='bool'),
         'ALWAYS_EAGER': Option(False, type='bool'),
-        'AMQP_TASK_RESULT_EXPIRES': Option(type='float',
-                deprecate_by='2.5', remove_by='4.0',
-                alt='CELERY_TASK_RESULT_EXPIRES'),
-        'AMQP_TASK_RESULT_CONNECTION_MAX': Option(1, type='int',
-                remove_by='2.5', alt='BROKER_POOL_LIMIT'),
+        'AMQP_TASK_RESULT_EXPIRES': Option(
+            type='float', deprecate_by='2.5', remove_by='4.0',
+            alt='CELERY_TASK_RESULT_EXPIRES'
+        ),
+        'AMQP_TASK_RESULT_CONNECTION_MAX': Option(
+            1, type='int', remove_by='2.5', alt='BROKER_POOL_LIMIT',
+        ),
         'ANNOTATIONS': Option(type='any'),
         'BROADCAST_QUEUE': Option('celeryctl'),
         'BROADCAST_EXCHANGE': Option('celeryctl'),
@@ -137,14 +139,15 @@ NAMESPACES = {
         'SEND_TASK_ERROR_EMAILS': Option(False, type='bool'),
         'SEND_TASK_SENT_EVENT': Option(False, type='bool'),
         'STORE_ERRORS_EVEN_IF_IGNORED': Option(False, type='bool'),
-        'TASK_ERROR_WHITELIST': Option((), type='tuple',
-            deprecate_by='2.5', remove_by='4.0'),
+        'TASK_ERROR_WHITELIST': Option(
+            (), type='tuple', deprecate_by='2.5', remove_by='4.0',
+        ),
         'TASK_PUBLISH_RETRY': Option(True, type='bool'),
         'TASK_PUBLISH_RETRY_POLICY': Option({
-                'max_retries': 100,
-                'interval_start': 0,
-                'interval_max': 1,
-                'interval_step': 0.2}, type='dict'),
+            'max_retries': 100,
+            'interval_start': 0,
+            'interval_max': 1,
+            'interval_step': 0.2}, type='dict'),
         'TASK_RESULT_EXPIRES': Option(timedelta(days=1), type='float'),
         'TASK_SERIALIZER': Option('pickle'),
         'TIMEZONE': Option(type='string'),
@@ -173,7 +176,7 @@ NAMESPACES = {
         'LOG_LEVEL': Option('WARN', deprecate_by='2.4', remove_by='4.0',
                             alt='--loglevel argument'),
         'LOG_FILE': Option(deprecate_by='2.4', remove_by='4.0',
-                            alt='--logfile argument'),
+                           alt='--logfile argument'),
         'MEDIATOR': Option('celery.worker.mediator.Mediator'),
         'MAX_TASKS_PER_CHILD': Option(type='int'),
         'POOL': Option(DEFAULT_POOL),

+ 34 - 24
celery/app/log.py

@@ -60,7 +60,7 @@ class Logging(object):
         self.colorize = self.app.conf.CELERYD_LOG_COLOR
 
     def setup(self, loglevel=None, logfile=None, redirect_stdouts=False,
-            redirect_level='WARNING', colorize=None):
+              redirect_level='WARNING', colorize=None):
         handled = self.setup_logging_subsystem(
             loglevel, logfile, colorize=colorize,
         )
@@ -68,15 +68,16 @@ class Logging(object):
             logger = get_logger('celery.redirected')
             if redirect_stdouts:
                 self.redirect_stdouts_to_logger(logger,
-                                loglevel=redirect_level)
+                                                loglevel=redirect_level)
         os.environ.update(
             CELERY_LOG_LEVEL=str(loglevel) if loglevel else '',
             CELERY_LOG_FILE=str(logfile) if logfile else '',
             CELERY_LOG_REDIRECT='1' if redirect_stdouts else '',
-            CELERY_LOG_REDIRECT_LEVEL=str(redirect_level))
+            CELERY_LOG_REDIRECT_LEVEL=str(redirect_level),
+        )
 
     def setup_logging_subsystem(self, loglevel=None, logfile=None,
-            format=None, colorize=None, **kwargs):
+                                format=None, colorize=None, **kwargs):
         if Logging._setup:
             return
         Logging._setup = True
@@ -86,9 +87,10 @@ class Logging(object):
         reset_multiprocessing_logger()
         if not is_py3k:
             ensure_process_aware_logger()
-        receivers = signals.setup_logging.send(sender=None,
-                        loglevel=loglevel, logfile=logfile,
-                        format=format, colorize=colorize)
+        receivers = signals.setup_logging.send(
+            sender=None, loglevel=loglevel, logfile=logfile,
+            format=format, colorize=colorize,
+        )
         if not receivers:
             root = logging.getLogger()
 
@@ -100,22 +102,26 @@ class Logging(object):
                                     colorize, **kwargs)
                 if loglevel:
                     logger.setLevel(loglevel)
-                signals.after_setup_logger.send(sender=None, logger=logger,
-                                            loglevel=loglevel, logfile=logfile,
-                                            format=format, colorize=colorize)
+                signals.after_setup_logger.send(
+                    sender=None, logger=logger,
+                    loglevel=loglevel, logfile=logfile,
+                    format=format, colorize=colorize,
+                )
             # then setup the root task logger.
             self.setup_task_loggers(loglevel, logfile, colorize=colorize)
 
         # This is a hack for multiprocessing's fork+exec, so that
         # logging before Process.run works.
         logfile_name = logfile if isinstance(logfile, basestring) else ''
-        os.environ.update(_MP_FORK_LOGLEVEL_=str(loglevel),
-                          _MP_FORK_LOGFILE_=logfile_name,
-                          _MP_FORK_LOGFORMAT_=format)
+        os.environ.update(
+            _MP_FORK_LOGLEVEL_=str(loglevel),
+            _MP_FORK_LOGFILE_=logfile_name,
+            _MP_FORK_LOGFORMAT_=format,
+        )
         return receivers
 
     def setup_task_loggers(self, loglevel=None, logfile=None, format=None,
-            colorize=None, propagate=False, **kwargs):
+                           colorize=None, propagate=False, **kwargs):
         """Setup the task logger.
 
         If `logfile` is not specified, then `sys.stderr` is used.
@@ -127,19 +133,23 @@ class Logging(object):
         format = format or self.task_format
         colorize = self.supports_color(colorize, logfile)
 
-        logger = self.setup_handlers(get_logger('celery.task'),
-                                     logfile, format, colorize,
-                                     formatter=TaskFormatter, **kwargs)
+        logger = self.setup_handlers(
+            get_logger('celery.task'),
+            logfile, format, colorize,
+            formatter=TaskFormatter, **kwargs
+        )
         logger.setLevel(loglevel)
         logger.propagate = int(propagate)    # this is an int for some reason.
                                              # better to not question why.
-        signals.after_setup_task_logger.send(sender=None, logger=logger,
-                                     loglevel=loglevel, logfile=logfile,
-                                     format=format, colorize=colorize)
+        signals.after_setup_task_logger.send(
+            sender=None, logger=logger,
+            loglevel=loglevel, logfile=logfile,
+            format=format, colorize=colorize,
+        )
         return logger
 
     def redirect_stdouts_to_logger(self, logger, loglevel=None,
-            stdout=True, stderr=True):
+                                   stdout=True, stderr=True):
         """Redirect :class:`sys.stdout` and :class:`sys.stderr` to a
         logging instance.
 
@@ -169,7 +179,7 @@ class Logging(object):
         return colored(enabled=self.supports_color(enabled, logfile))
 
     def setup_handlers(self, logger, logfile, format, colorize,
-            formatter=ColorFormatter, **kwargs):
+                       formatter=ColorFormatter, **kwargs):
         if self._is_configured(logger):
             return logger
         handler = self._detect_handler(logfile)
@@ -187,11 +197,11 @@ class Logging(object):
 
     def _has_handler(self, logger):
         return (logger.handlers and
-                    not isinstance(logger.handlers[0], NullHandler))
+                not isinstance(logger.handlers[0], NullHandler))
 
     def _is_configured(self, logger):
         return self._has_handler(logger) and not getattr(
-                logger, '_rudimentary_setup', False)
+            logger, '_rudimentary_setup', False)
 
     def setup_logger(self, name='celery', *args, **kwargs):
         """Deprecated: No longer used."""

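For reference, all three silenced flake codes concern continuation-line indentation: E126 is a continuation line over-indented for a hanging indent, while E127/E128 are continuation lines over- or under-indented for a visual indent. A minimal before/after sketch of the two accepted styles (function and argument names are made up for illustration):

    # Visual indent (fixes E127/E128): continuation lines align exactly
    # under the first argument after the opening bracket.
    reply = send_message(recipient,
                         body)

    # Hanging indent (fixes E126): nothing after the opening bracket,
    # one consistent extra indentation level for the arguments.
    reply = send_message(
        recipient,
        body,
    )
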
+ 1 - 1
celery/app/registry.py

@@ -52,7 +52,7 @@ class TaskRegistry(dict):

     def filter_types(self, type):
         return dict((name, task) for name, task in self.iteritems()
-                                if getattr(task, 'type', 'regular') == type)
+                    if getattr(task, 'type', 'regular') == type)


 def _unpickle_task(name):

+ 2 - 2
celery/app/routes.py

@@ -31,8 +31,8 @@ class MapRoute(object):

 class Router(object):

-    def __init__(self, routes=None, queues=None, create_missing=False,
-            app=None):
+    def __init__(self, routes=None, queues=None,
+                 create_missing=False, app=None):
         self.app = app
         self.queues = {} if queues is None else queues
         self.routes = [] if routes is None else routes

+ 14 - 12
celery/app/task.py

@@ -357,9 +357,9 @@ class Task(object):
         return self.apply_async(args, kwargs)

     def apply_async(self, args=None, kwargs=None,
-            task_id=None, producer=None, connection=None, router=None,
-            link=None, link_error=None, publisher=None, add_to_parent=True,
-            **options):
+                    task_id=None, producer=None, connection=None, router=None,
+                    link=None, link_error=None, publisher=None,
+                    add_to_parent=True, **options):
         """Apply tasks asynchronously by sending a message.

         :keyword args: The positional arguments to pass on to the
@@ -484,7 +484,7 @@ class Task(object):
         return result

     def subtask_from_request(self, request=None, args=None, kwargs=None,
-            **extra_options):
+                             **extra_options):

         request = self.request if request is None else request
         args = request.args if args is None else args
@@ -500,7 +500,7 @@ class Task(object):
         return self.subtask(args, kwargs, options, type=self, **extra_options)

     def retry(self, args=None, kwargs=None, exc=None, throw=True,
-            eta=None, countdown=None, max_retries=None, **options):
+              eta=None, countdown=None, max_retries=None, **options):
         """Retry the task.

         :param args: Positional arguments to retry with.
@@ -557,15 +557,17 @@ class Task(object):
         if not eta and countdown is None:
             countdown = self.default_retry_delay

-        S = self.subtask_from_request(request, args, kwargs,
-            countdown=countdown, eta=eta, retries=retries)
+        S = self.subtask_from_request(
+            request, args, kwargs,
+            countdown=countdown, eta=eta, retries=retries,
+        )

         if max_retries is not None and retries > max_retries:
             if exc:
                 maybe_reraise()
             raise self.MaxRetriesExceededError(
-                    """Can't retry %s[%s] args:%s kwargs:%s""" % (
-                        self.name, request.id, S.args, S.kwargs))
+                """Can't retry %s[%s] args:%s kwargs:%s""" % (
+                    self.name, request.id, S.args, S.kwargs))

         # If task was executed eagerly using apply(),
         # then the retry must also be executed eagerly.
@@ -620,8 +622,8 @@ class Task(object):
                               'delivery_info': {'is_eager': True}}
             supported_keys = fun_takes_kwargs(task.run, default_kwargs)
             extend_with = dict((key, val)
-                                    for key, val in default_kwargs.items()
-                                        if key in supported_keys)
+                               for key, val in default_kwargs.items()
+                               if key in supported_keys)
             kwargs.update(extend_with)

         tb = None
@@ -639,7 +641,7 @@ class Task(object):

         """
         return self._get_app().AsyncResult(task_id, backend=self.backend,
-                                                    task_name=self.name)
+                                           task_name=self.name)

     def subtask(self, *args, **kwargs):
         """Returns :class:`~celery.subtask` object for

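The retry() hunk above rebuilds the task's signature from the current request before re-publishing it. From inside a task, typical use looks roughly like this (the task, URL handling and retry limits here are illustrative, not from the diff):

    from urllib2 import urlopen

    from celery.task import task


    @task
    def fetch(url):
        try:
            return urlopen(url).read()
        except IOError, exc:
            # Re-schedule this task with the same arguments and a bumped
            # retry count; MaxRetriesExceededError is raised once the
            # max_retries limit is exceeded.
            raise fetch.retry(exc=exc, countdown=5, max_retries=3)
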
+ 4 - 3
celery/app/utils.py

@@ -95,8 +95,9 @@ class Settings(datastructures.ConfigurationView):
     def humanize(self):
         """Returns a human readable string showing changes to the
         configuration."""
-        return '\n'.join('%s %s' % (key + ':', pretty(value, width=50))
-                        for key, value in self.without_defaults().iteritems())
+        return '\n'.join(
+            '%s %s' % (key + ':', pretty(value, width=50))
+            for key, value in self.without_defaults().iteritems())


 class AppPickler(object):
@@ -115,7 +116,7 @@ class AppPickler(object):
         return self.build_standard_kwargs(*args)

     def build_standard_kwargs(self, main, changes, loader, backend, amqp,
-            events, log, control, accept_magic_kwargs):
+                              events, log, control, accept_magic_kwargs):
         return dict(main=main, loader=loader, backend=backend, amqp=amqp,
                     changes=changes, events=events, log=log, control=control,
                     set_as_current=False,

+ 5 - 4
celery/apps/beat.py

@@ -47,7 +47,7 @@ class Beat(configurated):
     redirect_stdouts_level = from_config()

     def __init__(self, max_interval=None, app=None,
-            socket_timeout=30, pidfile=None, no_color=None, **kwargs):
+                 socket_timeout=30, pidfile=None, no_color=None, **kwargs):
         """Starts the celerybeat task scheduler."""
         self.app = app = app_or_default(app or self.app)
         self.setup_defaults(kwargs, namespace='celerybeat')
@@ -66,7 +66,7 @@ class Beat(configurated):

     def run(self):
         print(str(self.colored.cyan(
-                    'celerybeat v%s is starting.' % VERSION_BANNER)))
+            'celerybeat v%s is starting.' % VERSION_BANNER)))
         self.init_loader()
         self.set_process_title()
         self.start_scheduler()
@@ -125,8 +125,9 @@ class Beat(configurated):

     def set_process_title(self):
         arg_start = 'manage' in sys.argv[0] and 2 or 1
-        platforms.set_process_title('celerybeat',
-                               info=' '.join(sys.argv[arg_start:]))
+        platforms.set_process_title(
+            'celerybeat', info=' '.join(sys.argv[arg_start:]),
+        )

     def install_sync_handler(self, beat):
         """Install a `SIGTERM` + `SIGINT` handler that saves

+ 21 - 17
celery/apps/worker.py

@@ -49,7 +49,7 @@ def active_thread_count():
     from threading import enumerate
     # must use .getName on Python 2.5
     return sum(1 for t in enumerate()
-        if not t.getName().startswith('Dummy-'))
+               if not t.getName().startswith('Dummy-'))


 def safe_say(msg):
@@ -107,9 +107,9 @@ class Worker(configurated):
     redirect_stdouts_level = from_config()

     def __init__(self, hostname=None, purge=False, beat=False,
-            queues=None, include=None, app=None, pidfile=None,
-            autoscale=None, autoreload=False, no_execv=False,
-            no_color=None, **kwargs):
+                 queues=None, include=None, app=None, pidfile=None,
+                 autoscale=None, autoreload=False, no_execv=False,
+                 no_color=None, **kwargs):
         self.app = app = app_or_default(app or self.app)
         self.hostname = hostname or socket.gethostname()

@@ -207,7 +207,7 @@ class Worker(configurated):
     def purge_messages(self):
         count = self.app.control.purge()
         print('purge: Erased %d %s from the queue.\n' % (
-                count, pluralize(count, 'message')))
+            count, pluralize(count, 'message')))

     def tasklist(self, include_builtins=True):
         tasks = self.app.tasks
@@ -260,13 +260,15 @@ class Worker(configurated):
         return '\n'.join(banner) + '\n'

     def run_worker(self):
-        worker = self.WorkController(app=self.app,
-                    hostname=self.hostname,
-                    ready_callback=self.on_consumer_ready, beat=self.beat,
-                    autoscale=self.autoscale, autoreload=self.autoreload,
-                    no_execv=self.no_execv,
-                    pidfile=self.pidfile,
-                    **self.confopts_as_dict())
+        worker = self.WorkController(
+            app=self.app,
+            hostname=self.hostname,
+            ready_callback=self.on_consumer_ready, beat=self.beat,
+            autoscale=self.autoscale, autoreload=self.autoreload,
+            no_execv=self.no_execv,
+            pidfile=self.pidfile,
+            **self.confopts_as_dict()
+        )
         self.install_platform_tweaks(worker)
         signals.worker_init.send(sender=worker)
         worker.start()
@@ -298,13 +300,15 @@ class Worker(configurated):
         os.environ.setdefault('celery_dummy_proxy', 'set_by_celeryd')

     def set_process_status(self, info):
-        return platforms.set_mp_process_title('celeryd',
-                info='%s (%s)' % (info, platforms.strargv(sys.argv)),
-                hostname=self.hostname)
+        return platforms.set_mp_process_title(
+            'celeryd',
+            info='%s (%s)' % (info, platforms.strargv(sys.argv)),
+            hostname=self.hostname,
+        )


-def _shutdown_handler(worker, sig='TERM', how='Warm', exc=SystemExit,
-        callback=None):
+def _shutdown_handler(worker, sig='TERM', how='Warm',
+                      exc=SystemExit, callback=None):

     def _handle_request(*args):
         set_in_sighandler(True)

+ 1 - 1
celery/backends/__init__.py

@@ -45,7 +45,7 @@ def get_backend_cls(backend=None, loader=None):
         return symbol_by_name(backend, aliases)
     except ValueError, exc:
         raise ValueError, ValueError(UNKNOWN_BACKEND % (
-                    backend, exc)), sys.exc_info()[2]
+            backend, exc)), sys.exc_info()[2]


 def get_backend_by_url(backend=None, loader=None):

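The hunk above keeps Python 2's three-argument raise, which substitutes a wrapped exception while preserving the traceback of the original failure. A self-contained sketch of the idiom (the wrapper function is hypothetical):

    import sys

    def to_int(value):
        try:
            return int(value)
        except ValueError, exc:
            # Raise a new exception, but re-use the original traceback so
            # the real point of failure is not lost (Python 2-only syntax).
            raise ValueError, ValueError('bad value: %s' % exc), sys.exc_info()[2]
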
+ 25 - 22
celery/backends/amqp.py

@@ -49,21 +49,21 @@ class AMQPBackend(BaseDictBackend):
     supports_native_join = True

     retry_policy = {
-            'max_retries': 20,
-            'interval_start': 0,
-            'interval_step': 1,
-            'interval_max': 1,
+        'max_retries': 20,
+        'interval_start': 0,
+        'interval_step': 1,
+        'interval_max': 1,
     }

     def __init__(self, connection=None, exchange=None, exchange_type=None,
-            persistent=None, serializer=None, auto_delete=True,
-            **kwargs):
+                 persistent=None, serializer=None, auto_delete=True,
+                 **kwargs):
         super(AMQPBackend, self).__init__(**kwargs)
         conf = self.app.conf
         self._connection = connection
         self.queue_arguments = {}
         self.persistent = (conf.CELERY_RESULT_PERSISTENT if persistent is None
-                                                         else persistent)
+                           else persistent)
         delivery_mode = persistent and 'persistent' or 'transient'
         exchange = exchange or conf.CELERY_RESULT_EXCHANGE
         exchange_type = exchange_type or conf.CELERY_RESULT_EXCHANGE_TYPE
@@ -103,8 +103,9 @@ class AMQPBackend(BaseDictBackend):
         pass

     def _republish(self, channel, task_id, body, content_type,
-            content_encoding):
-        return Producer(channel).publish(body,
+                   content_encoding):
+        return Producer(channel).publish(
+            body,
             exchange=self.exchange,
             routing_key=task_id.replace('-', ''),
             serializer=self.serializer,
@@ -130,7 +131,7 @@ class AMQPBackend(BaseDictBackend):
         return result

     def wait_for(self, task_id, timeout=None, cache=True, propagate=True,
-            **kwargs):
+                 **kwargs):
         cached_meta = self._cache.get(task_id)
         if cache and cached_meta and \
                 cached_meta['status'] in states.READY_STATES:
@@ -232,31 +233,33 @@ class AMQPBackend(BaseDictBackend):

     def reload_task_result(self, task_id):
         raise NotImplementedError(
-                'reload_task_result is not supported by this backend.')
+            'reload_task_result is not supported by this backend.')

     def reload_group_result(self, task_id):
         """Reload group result, even if it has been previously fetched."""
         raise NotImplementedError(
-                'reload_group_result is not supported by this backend.')
+            'reload_group_result is not supported by this backend.')

     def save_group(self, group_id, result):
         raise NotImplementedError(
-                'save_group is not supported by this backend.')
+            'save_group is not supported by this backend.')

     def restore_group(self, group_id, cache=True):
         raise NotImplementedError(
-                'restore_group is not supported by this backend.')
+            'restore_group is not supported by this backend.')

     def delete_group(self, group_id):
         raise NotImplementedError(
-                'delete_group is not supported by this backend.')
+            'delete_group is not supported by this backend.')

     def __reduce__(self, args=(), kwargs={}):
-        kwargs.update(connection=self._connection,
-                      exchange=self.exchange.name,
-                      exchange_type=self.exchange.type,
-                      persistent=self.persistent,
-                      serializer=self.serializer,
-                      auto_delete=self.auto_delete,
-                      expires=self.expires)
+        kwargs.update(
+            connection=self._connection,
+            exchange=self.exchange.name,
+            exchange_type=self.exchange.type,
+            persistent=self.persistent,
+            serializer=self.serializer,
+            auto_delete=self.auto_delete,
+            expires=self.expires,
+        )
         return super(AMQPBackend, self).__reduce__(args, kwargs)

+ 24 - 23
celery/backends/base.py

@@ -30,9 +30,9 @@ from celery.exceptions import TimeoutError, TaskRevokedError
 from celery.result import from_serializable, GroupResult
 from celery.utils import timeutils
 from celery.utils.serialization import (
-        get_pickled_exception,
-        get_pickleable_exception,
-        create_exception_cls,
+    get_pickled_exception,
+    get_pickleable_exception,
+    create_exception_cls,
 )

 EXCEPTION_ABLE_CODECS = frozenset(['pickle', 'yaml'])
@@ -97,7 +97,7 @@ class BaseBackend(object):
     def store_result(self, task_id, result, status, traceback=None):
         """Store the result and status of a task."""
         raise NotImplementedError(
-                'store_result is not supported by this backend.')
+            'store_result is not supported by this backend.')

     def mark_as_started(self, task_id, **meta):
         """Mark a task as started"""
@@ -143,7 +143,7 @@ class BaseBackend(object):

     def forget(self, task_id):
         raise NotImplementedError('%s does not implement forget.' % (
-                    self.__class__))
+            self.__class__))

     def wait_for(self, task_id, timeout=None, propagate=True, interval=0.5):
         """Wait for task and return its result.
@@ -186,45 +186,45 @@ class BaseBackend(object):
     def get_status(self, task_id):
         """Get the status of a task."""
         raise NotImplementedError(
-                'get_status is not supported by this backend.')
+            'get_status is not supported by this backend.')

     def get_result(self, task_id):
         """Get the result of a task."""
         raise NotImplementedError(
-                'get_result is not supported by this backend.')
+            'get_result is not supported by this backend.')

     def get_children(self, task_id):
         raise NotImplementedError(
-                'get_children is not supported by this backend.')
+            'get_children is not supported by this backend.')

     def get_traceback(self, task_id):
         """Get the traceback for a failed task."""
         raise NotImplementedError(
-                'get_traceback is not supported by this backend.')
+            'get_traceback is not supported by this backend.')

     def save_group(self, group_id, result):
         """Store the result and status of a task."""
         raise NotImplementedError(
-                'save_group is not supported by this backend.')
+            'save_group is not supported by this backend.')

     def restore_group(self, group_id, cache=True):
         """Get the result of a group."""
         raise NotImplementedError(
-                'restore_group is not supported by this backend.')
+            'restore_group is not supported by this backend.')

     def delete_group(self, group_id):
         raise NotImplementedError(
-                'delete_group is not supported by this backend.')
+            'delete_group is not supported by this backend.')

     def reload_task_result(self, task_id):
         """Reload task result, even if it has been previously fetched."""
         raise NotImplementedError(
-                'reload_task_result is not supported by this backend.')
+            'reload_task_result is not supported by this backend.')

     def reload_group_result(self, task_id):
         """Reload group result, even if it has been previously fetched."""
         raise NotImplementedError(
-                'reload_group_result is not supported by this backend.')
+            'reload_group_result is not supported by this backend.')

     def on_chord_part_return(self, task, propagate=False):
         pass
@@ -249,7 +249,7 @@ class BaseDictBackend(BaseBackend):
     def __init__(self, *args, **kwargs):
         super(BaseDictBackend, self).__init__(*args, **kwargs)
         self._cache = LRUCache(limit=kwargs.get('max_cached_results') or
-                                 self.app.conf.CELERY_MAX_CACHED_RESULTS)
+                               self.app.conf.CELERY_MAX_CACHED_RESULTS)

     def store_result(self, task_id, result, status, traceback=None, **kwargs):
         """Store task result and status."""
@@ -263,7 +263,7 @@ class BaseDictBackend(BaseBackend):

     def _forget(self, task_id):
         raise NotImplementedError('%s does not implement forget.' % (
-                    self.__class__))
+            self.__class__))

     def get_status(self, task_id):
         """Get the status of a task."""
@@ -382,13 +382,13 @@ class KeyValueStoreBackend(BaseDictBackend):
         if hasattr(values, 'items'):
             # client returns dict so mapping preserved.
             return dict((self._strip_prefix(k), self.decode(v))
-                            for k, v in values.iteritems()
-                                if v is not None)
+                        for k, v in values.iteritems()
+                        if v is not None)
         else:
             # client returns list so need to recreate mapping.
             return dict((bytes_to_str(keys[i]), self.decode(value))
-                            for i, value in enumerate(values)
-                                if value is not None)
+                        for i, value in enumerate(values)
+                        if value is not None)

     def get_many(self, task_ids, timeout=None, interval=0.5):
         ids = set(task_ids)
@@ -408,7 +408,7 @@ class KeyValueStoreBackend(BaseDictBackend):
         while ids:
             keys = list(ids)
             r = self._mget_to_results(self.mget([self.get_key_for_task(k)
-                                                    for k in keys]), keys)
+                                                 for k in keys]), keys)
             self._cache.update(r)
             ids ^= set(map(bytes_to_str, r))
             for key, value in r.iteritems():
@@ -487,6 +487,7 @@ class DisabledBackend(BaseBackend):
         pass

     def _is_disabled(self, *args, **kwargs):
-        raise NotImplementedError('No result backend configured.  '
-                'Please see the documentation for more information.')
+        raise NotImplementedError(
+            'No result backend configured.  '
+            'Please see the documentation for more information.')
     wait_for = get_status = get_result = get_traceback = _is_disabled

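One detail the _mget_to_results hunks re-align: a bulk get may return either a dict (mapping preserved) or a plain list in key order, in which case the mapping has to be rebuilt. Reduced to a standalone sketch, without the key-prefix and decode steps of the real method:

    def mget_to_results(values, keys):
        if hasattr(values, 'items'):
            # dict-style client: the key -> value mapping survives as-is.
            return dict((k, v) for k, v in values.iteritems()
                        if v is not None)
        # list-style client: values arrive in key order, so pair them
        # back up with the requested keys.
        return dict((keys[i], value)
                    for i, value in enumerate(values)
                    if value is not None)
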
+ 8 - 7
celery/backends/cassandra.py

@@ -47,7 +47,7 @@ class CassandraBackend(BaseDictBackend):
     _retry_wait = 3

     def __init__(self, servers=None, keyspace=None, column_family=None,
-            cassandra_options=None, detailed_mode=False, **kwargs):
+                 cassandra_options=None, detailed_mode=False, **kwargs):
         """Initialize Cassandra backend.

         Raises :class:`celery.exceptions.ImproperlyConfigured` if
@@ -57,7 +57,7 @@ class CassandraBackend(BaseDictBackend):
         super(CassandraBackend, self).__init__(**kwargs)

         self.expires = kwargs.get('expires') or maybe_timedelta(
-                                    self.app.conf.CELERY_TASK_RESULT_EXPIRES)
+            self.app.conf.CELERY_TASK_RESULT_EXPIRES)

         if not pycassa:
             raise ImproperlyConfigured(
@@ -94,7 +94,7 @@ class CassandraBackend(BaseDictBackend):

         if not self.servers or not self.keyspace or not self.column_family:
             raise ImproperlyConfigured(
-                    'Cassandra backend not configured.')
+                'Cassandra backend not configured.')

         self._column_family = None

@@ -119,10 +119,11 @@ class CassandraBackend(BaseDictBackend):
             conn = pycassa.ConnectionPool(self.keyspace,
                                           server_list=self.servers,
                                           **self.cassandra_options)
-            self._column_family = \
-              pycassa.ColumnFamily(conn, self.column_family,
-                    read_consistency_level=self.read_consistency,
-                    write_consistency_level=self.write_consistency)
+            self._column_family = pycassa.ColumnFamily(
+                conn, self.column_family,
+                read_consistency_level=self.read_consistency,
+                write_consistency_level=self.write_consistency,
+            )
         return self._column_family

     def process_cleanup(self):

+ 20 - 16
celery/backends/database/__init__.py

@@ -56,29 +56,33 @@ class DatabaseBackend(BaseDictBackend):
     subpolling_interval = 0.5

     def __init__(self, dburi=None, expires=None,
-            engine_options=None, **kwargs):
+                 engine_options=None, **kwargs):
         super(DatabaseBackend, self).__init__(**kwargs)
         conf = self.app.conf
         self.expires = maybe_timedelta(self.prepare_expires(expires))
         self.dburi = dburi or conf.CELERY_RESULT_DBURI
-        self.engine_options = dict(engine_options or {},
-                        **conf.CELERY_RESULT_ENGINE_OPTIONS or {})
-        self.short_lived_sessions = kwargs.get('short_lived_sessions',
-                                    conf.CELERY_RESULT_DB_SHORT_LIVED_SESSIONS)
+        self.engine_options = dict(
+            engine_options or {},
+            **conf.CELERY_RESULT_ENGINE_OPTIONS or {})
+        self.short_lived_sessions = kwargs.get(
+            'short_lived_sessions',
+            conf.CELERY_RESULT_DB_SHORT_LIVED_SESSIONS,
+        )
         if not self.dburi:
             raise ImproperlyConfigured(
-                    'Missing connection string! Do you have '
-                    'CELERY_RESULT_DBURI set to a real value?')
+                'Missing connection string! Do you have '
+                'CELERY_RESULT_DBURI set to a real value?')

     def ResultSession(self):
         return ResultSession(
-                    dburi=self.dburi,
-                    short_lived_sessions=self.short_lived_sessions,
-                    **self.engine_options)
+            dburi=self.dburi,
+            short_lived_sessions=self.short_lived_sessions,
+            **self.engine_options
+        )

     @retry
-    def _store_result(self, task_id, result, status, traceback=None,
-            max_retries=3):
+    def _store_result(self, task_id, result, status,
+                      traceback=None, max_retries=3):
         """Store return value and status of an executed task."""
         session = self.ResultSession()
         try:
@@ -128,7 +132,7 @@ class DatabaseBackend(BaseDictBackend):
         session = self.ResultSession()
         try:
             group = session.query(TaskSet).filter(
-                    TaskSet.taskset_id == group_id).first()
+                TaskSet.taskset_id == group_id).first()
             if group:
                 return group.to_dict()
         finally:
@@ -140,7 +144,7 @@ class DatabaseBackend(BaseDictBackend):
         session = self.ResultSession()
         try:
             session.query(TaskSet).filter(
-                    TaskSet.taskset_id == group_id).delete()
+                TaskSet.taskset_id == group_id).delete()
             session.flush()
             session.commit()
         finally:
@@ -163,9 +167,9 @@ class DatabaseBackend(BaseDictBackend):
         now = self.app.now()
         try:
             session.query(Task).filter(
-                    Task.date_done < (now - expires)).delete()
+                Task.date_done < (now - expires)).delete()
             session.query(TaskSet).filter(
-                    TaskSet.date_done < (now - expires)).delete()
+                TaskSet.date_done < (now - expires)).delete()
             session.commit()
         finally:
             session.close()

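All of the database-backend hunks share one session discipline: open a session per operation, commit, and close in a finally block so the connection is released even if the query raises. The shape, as a generic SQLAlchemy-style sketch (Session and Model stand in for the real session factory and mapped class):

    def delete_older_than(Session, Model, cutoff):
        session = Session()
        try:
            session.query(Model).filter(Model.date_done < cutoff).delete()
            session.commit()
        finally:
            # Always release the connection, whatever happened above.
            session.close()
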
+ 2 - 2
celery/backends/database/dfd042c7.py

@@ -38,8 +38,8 @@ class PickleType(_PickleType):  # pragma: no cover
             return self.comparator(x, y)
         elif self.mutable and not hasattr(x, '__eq__') and x is not None:
             util.warn_deprecated(
-                    'Objects stored with PickleType when mutable=True '
-                    'must implement __eq__() for reliable comparison.')
+                'Objects stored with PickleType when mutable=True '
+                'must implement __eq__() for reliable comparison.')
             a = self.pickler.dumps(x, self.protocol)
             b = self.pickler.dumps(y, self.protocol)
             return a == b

+ 3 - 3
celery/backends/database/models.py

@@ -35,7 +35,7 @@ class Task(ResultModelBase):
     status = sa.Column(sa.String(50), default=states.PENDING)
     result = sa.Column(PickleType, nullable=True)
     date_done = sa.Column(sa.DateTime, default=datetime.utcnow,
-                       onupdate=datetime.utcnow, nullable=True)
+                          onupdate=datetime.utcnow, nullable=True)
     traceback = sa.Column(sa.Text, nullable=True)

     def __init__(self, task_id):
@@ -58,11 +58,11 @@ class TaskSet(ResultModelBase):
     __table_args__ = {'sqlite_autoincrement': True}

     id = sa.Column(sa.Integer, sa.Sequence('taskset_id_sequence'),
-                autoincrement=True, primary_key=True)
+                   autoincrement=True, primary_key=True)
     taskset_id = sa.Column(sa.String(255), unique=True)
     result = sa.Column(sa.PickleType, nullable=True)
     date_done = sa.Column(sa.DateTime, default=datetime.utcnow,
-                       nullable=True)
+                          nullable=True)

     def __init__(self, taskset_id, result):
         self.taskset_id = taskset_id

+ 3 - 3
celery/backends/mongodb.py

@@ -56,7 +56,7 @@ class MongoBackend(BaseDictBackend):
         """
         super(MongoBackend, self).__init__(*args, **kwargs)
         self.expires = kwargs.get('expires') or maybe_timedelta(
-                                    self.app.conf.CELERY_TASK_RESULT_EXPIRES)
+            self.app.conf.CELERY_TASK_RESULT_EXPIRES)

         if not pymongo:
             raise ImproperlyConfigured(
@@ -73,9 +73,9 @@ class MongoBackend(BaseDictBackend):
             self.mongodb_port = int(config.get('port', self.mongodb_port))
             self.mongodb_user = config.get('user', self.mongodb_user)
             self.mongodb_password = config.get(
-                    'password', self.mongodb_password)
+                'password', self.mongodb_password)
             self.mongodb_database = config.get(
-                    'database', self.mongodb_database)
+                'database', self.mongodb_database)
             self.mongodb_taskmeta_collection = config.get(
                 'taskmeta_collection', self.mongodb_taskmeta_collection)
             self.mongodb_max_pool_size = config.get(

+ 1 - 1
celery/backends/redis.py

@@ -48,7 +48,7 @@ class RedisBackend(KeyValueStoreBackend):
     implements_incr = True

     def __init__(self, host=None, port=None, db=None, password=None,
-            expires=None, max_connections=None, url=None, **kwargs):
+                 expires=None, max_connections=None, url=None, **kwargs):
         super(RedisBackend, self).__init__(**kwargs)
         conf = self.app.conf
         if self.redis is None:

+ 29 - 23
celery/beat.py

@@ -78,8 +78,8 @@ class ScheduleEntry(object):
     total_run_count = 0

     def __init__(self, name=None, task=None, last_run_at=None,
-            total_run_count=None, schedule=None, args=(), kwargs={},
-            options={}, relative=False):
+                 total_run_count=None, schedule=None, args=(), kwargs={},
+                 options={}, relative=False):
         self.name = name
         self.task = task
         self.args = args
@@ -95,9 +95,11 @@ class ScheduleEntry(object):
     def _next_instance(self, last_run_at=None):
         """Returns a new instance of the same class, but with
         its date and count fields updated."""
-        return self.__class__(**dict(self,
-                                last_run_at=last_run_at or self._default_now(),
-                                total_run_count=self.total_run_count + 1))
+        return self.__class__(**dict(
+            self,
+            last_run_at=last_run_at or self._default_now(),
+            total_run_count=self.total_run_count + 1,
+        ))
     __next__ = next = _next_instance  # for 2to3

     def update(self, other):
@@ -119,9 +121,11 @@ class ScheduleEntry(object):
         return vars(self).iteritems()

     def __repr__(self):
-        return ('<Entry: %s %s {%s}' % (self.name,
-                    reprcall(self.task, self.args or (), self.kwargs or {}),
-                    self.schedule))
+        return '<Entry: %s %s {%s}' % (
+            self.name,
+            reprcall(self.task, self.args or (), self.kwargs or {}),
+            self.schedule,
+        )


 class Scheduler(object):
@@ -148,12 +152,12 @@ class Scheduler(object):
     logger = logger  # compat

     def __init__(self, schedule=None, max_interval=None,
-            app=None, Publisher=None, lazy=False, **kwargs):
+                 app=None, Publisher=None, lazy=False, **kwargs):
         app = self.app = app_or_default(app)
         self.data = maybe_promise({} if schedule is None else schedule)
         self.max_interval = (max_interval
-                                or app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
-                                or self.max_interval)
+                             or app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
+                             or self.max_interval)
         self.Publisher = Publisher or app.amqp.TaskProducer
         if not lazy:
             self.setup_schedule()
@@ -163,9 +167,9 @@ class Scheduler(object):
         if self.app.conf.CELERY_TASK_RESULT_EXPIRES:
            if 'celery.backend_cleanup' not in data:
                entries['celery.backend_cleanup'] = {
-                        'task': 'celery.backend_cleanup',
-                        'schedule': crontab('0', '4', '*'),
-                        'options': {'expires': 12 * 3600}}
+                    'task': 'celery.backend_cleanup',
+                    'schedule': crontab('0', '4', '*'),
+                    'options': {'expires': 12 * 3600}}
         self.update_from_dict(entries)

     def maybe_due(self, entry, publisher=None):
@@ -262,8 +266,9 @@ class Scheduler(object):
         return self.Entry(**dict(entry, name=name))

     def update_from_dict(self, dict_):
-        self.schedule.update(dict((name, self._maybe_entry(name, entry))
-                                for name, entry in dict_.items()))
+        self.schedule.update(dict(
+            (name, self._maybe_entry(name, entry))
+            for name, entry in dict_.items()))

     def merge_inplace(self, b):
         schedule = self.schedule
@@ -288,8 +293,9 @@ class Scheduler(object):
             error('Celerybeat: Connection error: %s. '
                   'Trying again in %s seconds...', exc, interval)

-        return self.connection.ensure_connection(_error_handler,
-                    self.app.conf.BROKER_CONNECTION_MAX_RETRIES)
+        return self.connection.ensure_connection(
+            _error_handler, self.app.conf.BROKER_CONNECTION_MAX_RETRIES
+        )

     def get_schedule(self):
         return self.data
@@ -365,8 +371,8 @@ class PersistentScheduler(Scheduler):
         self.install_default_entries(self.schedule)
         self._store.update(__version__=__version__, tz=tz, utc_enabled=utc)
         self.sync()
-        debug('Current schedule:\n' + '\n'.join(repr(entry)
-                                    for entry in entries.itervalues()))
+        debug('Current schedule:\n' + '\n'.join(
+            repr(entry) for entry in entries.itervalues()))

     def get_schedule(self):
         return self._store['entries']
@@ -392,13 +398,13 @@ class Service(object):
     scheduler_cls = PersistentScheduler

     def __init__(self, max_interval=None, schedule_filename=None,
-            scheduler_cls=None, app=None):
+                 scheduler_cls=None, app=None):
         app = self.app = app_or_default(app)
         self.max_interval = (max_interval
                              or app.conf.CELERYBEAT_MAX_LOOP_INTERVAL)
         self.scheduler_cls = scheduler_cls or self.scheduler_cls
-        self.schedule_filename = schedule_filename or \
-                                    app.conf.CELERYBEAT_SCHEDULE_FILENAME
+        self.schedule_filename = (
+            schedule_filename or app.conf.CELERYBEAT_SCHEDULE_FILENAME)

         self._is_shutdown = Event()
         self._is_stopped = Event()

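The install_default_entries hunk uses the same entry schema as user-defined schedules, so the reformatted dict doubles as a template. As a configuration example (the entry mirrors the diff; placing it in CELERYBEAT_SCHEDULE is the usual convention):

    from celery.schedules import crontab

    CELERYBEAT_SCHEDULE = {
        'celery.backend_cleanup': {
            'task': 'celery.backend_cleanup',
            'schedule': crontab('0', '4', '*'),   # minute 0, hour 4: daily at 04:00
            'options': {'expires': 12 * 3600},    # drop if not run within 12 hours
        },
    }
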
+ 5 - 5
celery/bin/base.py

@@ -96,7 +96,7 @@ class HelpFormatter(IndentedHelpFormatter):

     def format_description(self, description):
         return text.ensure_2lines(text.fill_paragraphs(
-                text.dedent(description), self.width))
+            text.dedent(description), self.width))


 class Command(object):
@@ -222,8 +222,8 @@ class Command(object):
     def prepare_args(self, options, args):
         if options:
             options = dict((k, self.expanduser(v))
-                            for k, v in vars(options).iteritems()
-                                if not k.startswith('_'))
+                           for k, v in vars(options).iteritems()
+                           if not k.startswith('_'))
         args = map(self.expanduser, args)
         self.check_args(args)
         return options, args
@@ -356,8 +356,8 @@ class Command(object):
                     in_option = m.groups()[0].strip()
                 assert in_option, 'missing long opt'
             elif in_option and line.startswith(' ' * 4):
-                options[in_option].append(find_rst_ref.sub(r'\1',
-                    line.strip()).replace('`', ''))
+                options[in_option].append(
+                    find_rst_ref.sub(r'\1', line.strip()).replace('`', ''))
         return options

     def with_pool_option(self, argv):

+ 5 - 5
celery/bin/camqadm.py

@@ -92,8 +92,8 @@ class Spec(object):
             ('pobox', True)

         """
-        return tuple(self.coerce(index, value)
-                for index, value in enumerate(arglist))
+        return tuple(
+            self.coerce(index, value) for index, value in enumerate(arglist))

     def format_response(self, response):
         """Format the return value of this command in a human-friendly way."""
@@ -112,7 +112,7 @@ class Spec(object):

     def format_signature(self):
         return ' '.join(self.format_arg(*padlist(list(arg), 3))
-                            for arg in self.args)
+                        for arg in self.args)


 def dump_message(message):
@@ -258,11 +258,11 @@ class AMQShell(cmd.Cmd):
         """Return all commands starting with `text`, for tab-completion."""
         names = self.get_names()
         first = [cmd for cmd in names
-                        if cmd.startswith(text.replace('_', '.'))]
+                 if cmd.startswith(text.replace('_', '.'))]
         if first:
             return first
         return [cmd for cmd in names
-                    if cmd.partition('.')[2].startswith(text)]
+                if cmd.partition('.')[2].startswith(text)]

     def dispatch(self, cmd, argline):
         """Dispatch and execute the command.

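completenames above matches in two passes: first against the full dotted command names (typed underscores count as dots), then against the segment after the first dot. A toy standalone version of the same idea (names illustrative):

    def complete(text, names):
        # Pass 1: treat '_' as '.' and match the full dotted name.
        first = [name for name in names
                 if name.startswith(text.replace('_', '.'))]
        if first:
            return first
        # Pass 2: match against the part after the first dot.
        return [name for name in names
                if name.partition('.')[2].startswith(text)]
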
+ 69 - 67
celery/bin/celery.py

@@ -99,7 +99,7 @@ class Command(BaseCommand):
     )
     )
 
 
     def __init__(self, app=None, no_color=False, stdout=sys.stdout,
     def __init__(self, app=None, no_color=False, stdout=sys.stdout,
-            stderr=sys.stderr, show_reply=True):
+                 stderr=sys.stderr, show_reply=True):
         super(Command, self).__init__(app=app)
         super(Command, self).__init__(app=app)
         self.colored = term.colored(enabled=not no_color)
         self.colored = term.colored(enabled=not no_color)
         self.stdout = stdout
         self.stdout = stdout
@@ -136,7 +136,7 @@ class Command(BaseCommand):
         self.arglist = argv[1:]
         self.arglist = argv[1:]
         self.parser = self.create_parser(self.prog_name, self.command)
         self.parser = self.create_parser(self.prog_name, self.command)
         options, args = self.prepare_args(
         options, args = self.prepare_args(
-                *self.parser.parse_args(self.arglist))
+            *self.parser.parse_args(self.arglist))
         self.colored = term.colored(enabled=not options['no_color'])
         self.colored = term.colored(enabled=not options['no_color'])
         self.quiet = options.get('quiet', False)
         self.quiet = options.get('quiet', False)
         self.show_body = options.get('show_body', True)
         self.show_body = options.get('show_body', True)
@@ -150,7 +150,7 @@ class Command(BaseCommand):
         if not n:
             return '- empty -'
         return '\n'.join(str(c.reset(c.white('*'), ' %s' % (item, )))
-                            for item in n)
+                         for item in n)

     def prettify_dict_ok_error(self, n):
         c = self.colored
@@ -334,7 +334,7 @@ class list_(Command):
             raise Error('You must specify what to list (%s)' % available)
         if what not in topics:
             raise Error('unknown topic %r (choose one of: %s)' % (
-                            what, available))
+                what, available))
         with self.app.connection() as conn:
             self.app.amqp.TaskConsumer(conn).declare()
             topics[what](conn.manager)
@@ -351,16 +351,16 @@ class call(Command):
     """
     args = '<task_name>'
     option_list = Command.option_list + (
-            Option('--args', '-a', help='positional arguments (json).'),
-            Option('--kwargs', '-k', help='keyword arguments (json).'),
-            Option('--eta', help='scheduled time (ISO-8601).'),
-            Option('--countdown', type='float',
-                help='eta in seconds from now (float/int).'),
-            Option('--expires', help='expiry time (ISO-8601/float/int).'),
-            Option('--serializer', default='json', help='defaults to json.'),
-            Option('--queue', help='custom queue name.'),
-            Option('--exchange', help='custom exchange name.'),
-            Option('--routing-key', help='custom routing key.'),
+        Option('--args', '-a', help='positional arguments (json).'),
+        Option('--kwargs', '-k', help='keyword arguments (json).'),
+        Option('--eta', help='scheduled time (ISO-8601).'),
+        Option('--countdown', type='float',
+               help='eta in seconds from now (float/int).'),
+        Option('--expires', help='expiry time (ISO-8601/float/int).'),
+        Option('--serializer', default='json', help='defaults to json.'),
+        Option('--queue', help='custom queue name.'),
+        Option('--exchange', help='custom exchange name.'),
+        Option('--routing-key', help='custom routing key.'),
     )

     def run(self, name, *_, **kw):
@@ -428,9 +428,9 @@ class result(Command):
     """
     args = '<task_id>'
     option_list = Command.option_list + (
-            Option('--task', '-t', help='name of task (if custom backend)'),
-            Option('--traceback', action='store_true',
-                   help='show traceback instead'),
+        Option('--task', '-t', help='name of task (if custom backend)'),
+        Option('--traceback', action='store_true',
+               help='show traceback instead'),
     )

     def run(self, task_id, *args, **kwargs):
@@ -454,14 +454,14 @@ class _RemoteControl(Command):
     choices = None
     leaf = False
     option_list = Command.option_list + (
-                Option('--timeout', '-t', type='float',
-                    help='Timeout in seconds (float) waiting for reply'),
-                Option('--destination', '-d',
-                    help='Comma separated list of destination node names.'))
+        Option('--timeout', '-t', type='float',
+               help='Timeout in seconds (float) waiting for reply'),
+        Option('--destination', '-d',
+               help='Comma separated list of destination node names.'))

     @classmethod
-    def get_command_info(self, command, indent=0, prefix='', color=None,
-            help=False):
+    def get_command_info(self, command,
+                         indent=0, prefix='', color=None, help=False):
         if help:
             help = '|' + text.indent(self.choices[command][1], indent + 4)
         else:
@@ -484,7 +484,7 @@ class _RemoteControl(Command):
         color = color if color else lambda x: x
         prefix = prefix + ' ' if prefix else ''
         return '\n'.join(self.get_command_info(c, indent, prefix, color, help)
-                            for c in sorted(self.choices))
+                         for c in sorted(self.choices))

     @property
     def epilog(self):
@@ -495,7 +495,7 @@ class _RemoteControl(Command):

     def usage(self, command):
         return '%%prog %s [options] %s <command> [arg1 .. argN]' % (
-                command, self.args)
+            command, self.args)

     def call(self, *args, **kwargs):
         raise NotImplementedError('get_obj')
@@ -595,10 +595,10 @@ class control(_RemoteControl):
         'disable_events': (1.0, 'tell worker(s) to disable events'),
         'add_consumer': (1.0, 'tell worker(s) to start consuming a queue'),
         'cancel_consumer': (1.0, 'tell worker(s) to stop consuming a queue'),
-        'rate_limit': (1.0,
-            'tell worker(s) to modify the rate limit for a task type'),
-        'time_limit': (1.0,
-            'tell worker(s) to modify the time limit for a task type.'),
+        'rate_limit': (
+            1.0, 'tell worker(s) to modify the rate limit for a task type'),
+        'time_limit': (
+            1.0, 'tell worker(s) to modify the time limit for a task type.'),
         'autoscale': (1.0, 'change autoscale settings'),
         'pool_grow': (1.0, 'start more pool processes'),
         'pool_shrink': (1.0, 'use less pool processes'),
@@ -607,7 +607,7 @@ class control(_RemoteControl):
     def call(self, method, *args, **options):
         # XXX Python 2.5 doesn't support X(*args, reply=True, **kwargs)
         return getattr(self.app.control, method)(
-                *args, **dict(options, retry=True))
+            *args, **dict(options, retry=True))

     def pool_grow(self, method, n=1, **kwargs):
         """[N=1]"""
@@ -630,7 +630,7 @@ class control(_RemoteControl):
         return self.call(method, task_name, soft, hard, reply=True, **kwargs)

     def add_consumer(self, method, queue, exchange=None,
-            exchange_type='direct', routing_key=None, **kwargs):
+                     exchange_type='direct', routing_key=None, **kwargs):
         """<queue> [exchange [type [routing_key]]]"""
         return self.call(method, queue, exchange,
                          exchange_type, routing_key, reply=True, **kwargs)
@@ -646,11 +646,12 @@ class status(Command):
     option_list = inspect.option_list

     def run(self, *args, **kwargs):
-        replies = inspect(app=self.app,
-                          no_color=kwargs.get('no_color', False),
-                          stdout=self.stdout, stderr=self.stderr,
-                          show_reply=False) \
-                    .run('ping', **dict(kwargs, quiet=True, show_body=False))
+        replies = inspect(
+            app=self.app,
+            no_color=kwargs.get('no_color', False),
+            stdout=self.stdout, stderr=self.stderr,
+            show_reply=False).run(
+                'ping', **dict(kwargs, quiet=True, show_body=False))
         if not replies:
             raise Error('No nodes replied within time constraint',
                         status=EX_UNAVAILABLE)
@@ -674,18 +675,18 @@ class migrate(Command):
     """
     args = '<source_url> <dest_url>'
     option_list = Command.option_list + (
-            Option('--limit', '-n', type='int',
-                    help='Number of tasks to consume (int)'),
-            Option('--timeout', '-t', type='float', default=1.0,
-                    help='Timeout in seconds (float) waiting for tasks'),
-            Option('--ack-messages', '-a', action='store_true',
-                    help='Ack messages from source broker.'),
-            Option('--tasks', '-T',
-                    help='List of task names to filter on.'),
-            Option('--queues', '-Q',
-                    help='List of queues to migrate.'),
-            Option('--forever', '-F', action='store_true',
-                    help='Continually migrate tasks until killed.'),
+        Option('--limit', '-n', type='int',
+               help='Number of tasks to consume (int)'),
+        Option('--timeout', '-t', type='float', default=1.0,
+               help='Timeout in seconds (float) waiting for tasks'),
+        Option('--ack-messages', '-a', action='store_true',
+               help='Ack messages from source broker.'),
+        Option('--tasks', '-T',
+               help='List of task names to filter on.'),
+        Option('--queues', '-Q',
+               help='List of queues to migrate.'),
+        Option('--forever', '-F', action='store_true',
+               help='Continually migrate tasks until killed.'),
     )

     def on_migrate_task(self, state, body, message):
@@ -729,20 +730,20 @@ class shell(Command):  # pragma: no cover
         <AsyncResult: 537b48c7-d6d3-427a-a24a-d1b4414035be>
     """
     option_list = Command.option_list + (
-                Option('--ipython', '-I',
-                    action='store_true', dest='force_ipython',
-                    help='force iPython.'),
-                Option('--bpython', '-B',
-                    action='store_true', dest='force_bpython',
-                    help='force bpython.'),
-                Option('--python', '-P',
-                    action='store_true', dest='force_python',
-                    help='force default Python shell.'),
-                Option('--without-tasks', '-T', action='store_true',
-                    help="don't add tasks to locals."),
-                Option('--eventlet', action='store_true',
-                    help='use eventlet.'),
-                Option('--gevent', action='store_true', help='use gevent.'),
+        Option('--ipython', '-I',
+               action='store_true', dest='force_ipython',
+               help='force iPython.'),
+        Option('--bpython', '-B',
+               action='store_true', dest='force_bpython',
+               help='force bpython.'),
+        Option('--python', '-P',
+               action='store_true', dest='force_python',
+               help='force default Python shell.'),
+        Option('--without-tasks', '-T', action='store_true',
+               help="don't add tasks to locals."),
+        Option('--eventlet', action='store_true',
+               help='use eventlet.'),
+        Option('--gevent', action='store_true', help='use gevent.'),
     )

     def run(self, force_ipython=False, force_bpython=False,
@@ -767,9 +768,10 @@ class shell(Command):  # pragma: no cover
                        'subtask': celery.subtask}

         if not without_tasks:
-            self.locals.update(dict((task.__name__, task)
-                                for task in self.app.tasks.itervalues()
-                                    if not task.name.startswith('celery.')))
+            self.locals.update(dict(
+                (task.__name__, task) for task in self.app.tasks.itervalues()
+                if not task.name.startswith('celery.')),
+            )

         if force_python:
             return self.invoke_fallback_shell()
@@ -801,7 +803,7 @@ class shell(Command):  # pragma: no cover
         else:
             import rlcompleter
             readline.set_completer(
-                    rlcompleter.Completer(self.locals).complete)
+                rlcompleter.Completer(self.locals).complete)
             readline.parse_and_bind('tab:complete')
         code.interact(local=self.locals)

@@ -913,7 +915,7 @@ class CeleryCommand(BaseCommand):
             ret.extend([
                 text.indent('+ %s: ' % white(cls), indent),
                 '\n'.join(self.get_command_info(command, indent + 4, color)
-                            for command in commands),
+                          for command in commands),
                 ''
             ])
         return '\n'.join(ret).strip()

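The option_list rewrites above are the E126 half of the cleanup: with nothing after the opening parenthesis, the tuple entries form a hanging indent and get one uniform four-space level instead of the deeper indent the old code used. A minimal sketch adapted from the call command's options (not copied from the commit):

    # Hanging indent: nothing follows '(' on the opening line.
    option_list = Command.option_list + (
            Option('--args', '-a'),  # E126: over-indented for hanging indent
    )
    option_list = Command.option_list + (
        Option('--args', '-a'),      # OK: one four-space level
    )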
+ 9 - 9
celery/bin/celeryd.py

@@ -138,7 +138,7 @@ class WorkerCommand(Command):
         # Pools like eventlet/gevent needs to patch libs as early
         # as possible.
         kwargs['pool_cls'] = concurrency.get_implementation(
-                    kwargs.get('pool_cls') or self.app.conf.CELERYD_POOL)
+            kwargs.get('pool_cls') or self.app.conf.CELERYD_POOL)
         if self.app.IS_WINDOWS and kwargs.get('beat'):
             self.die('-B option does not work on Windows.  '
                      'Please run celerybeat as a separate service.')
@@ -149,7 +149,7 @@ class WorkerCommand(Command):
             except KeyError:  # pragma: no cover
                 self.die('Unknown level %r. Please use one of %s.' % (
                     loglevel, '|'.join(l for l in LOG_LEVELS
-                      if isinstance(l, basestring))))
+                                       if isinstance(l, basestring))))
         return self.app.Worker(**kwargs).run()

     def with_pool_option(self, argv):
@@ -161,7 +161,7 @@ class WorkerCommand(Command):
         conf = self.app.conf
         return (
             Option('-c', '--concurrency',
-                default=conf.CELERYD_CONCURRENCY, type='int'),
+                   default=conf.CELERYD_CONCURRENCY, type='int'),
             Option('-P', '--pool', default=conf.CELERYD_POOL, dest='pool_cls'),
             Option('--purge', '--discard', default=False, action='store_true'),
             Option('-f', '--logfile', default=conf.CELERYD_LOG_FILE),
@@ -169,18 +169,18 @@ class WorkerCommand(Command):
             Option('-n', '--hostname'),
             Option('-B', '--beat', action='store_true'),
             Option('-s', '--schedule', dest='schedule_filename',
-                default=conf.CELERYBEAT_SCHEDULE_FILENAME),
+                   default=conf.CELERYBEAT_SCHEDULE_FILENAME),
             Option('--scheduler', dest='scheduler_cls'),
             Option('-S', '--statedb',
-                default=conf.CELERYD_STATE_DB, dest='state_db'),
+                   default=conf.CELERYD_STATE_DB, dest='state_db'),
             Option('-E', '--events', default=conf.CELERY_SEND_EVENTS,
-                action='store_true', dest='send_events'),
+                   action='store_true', dest='send_events'),
             Option('--time-limit', type='float', dest='task_time_limit',
-                default=conf.CELERYD_TASK_TIME_LIMIT),
+                   default=conf.CELERYD_TASK_TIME_LIMIT),
             Option('--soft-time-limit', dest='task_soft_time_limit',
-                default=conf.CELERYD_TASK_SOFT_TIME_LIMIT, type='float'),
+                   default=conf.CELERYD_TASK_SOFT_TIME_LIMIT, type='float'),
             Option('--maxtasksperchild', dest='max_tasks_per_child',
-                default=conf.CELERYD_MAX_TASKS_PER_CHILD, type='int'),
+                   default=conf.CELERYD_MAX_TASKS_PER_CHILD, type='int'),
             Option('--queues', '-Q', default=[]),
             Option('--include', '-I', default=[]),
             Option('--pidfile'),

+ 6 - 5
celery/bin/celeryd_detach.py

@@ -27,9 +27,10 @@ from celery.bin.base import daemon_options, Option
 logger = get_logger(__name__)

 OPTION_LIST = daemon_options(default_pidfile='celeryd.pid') + (
-                Option('--fake',
-                       default=False, action='store_true', dest='fake',
-                       help="Don't fork (for debugging purposes)"), )
+    Option('--fake',
+           default=False, action='store_true', dest='fake',
+           help="Don't fork (for debugging purposes)"),
+)


 def detach(path, argv, logfile=None, pidfile=None, uid=None,
@@ -76,7 +77,7 @@ class PartialOptionParser(OptionParser):
                         self.error('%s option requires an argument' % opt)
                     else:
                         self.error('%s option requires %d arguments' % (
-                                    opt, nargs))
+                            opt, nargs))
                 elif nargs == 1:
                     value = rargs.pop(0)
                 else:
@@ -143,7 +144,7 @@ class detached_celeryd(object):
         options, values, leftovers = self.parse_options(prog_name, argv[1:])
         sys.exit(detach(path=self.execv_path,
                  argv=self.execv_argv + leftovers + config,
-                  **vars(options)))
+                 **vars(options)))


 def main():

+ 12 - 10
celery/bin/celeryd_multi.py

@@ -109,7 +109,7 @@ from celery.utils import term
 from celery.utils.text import pluralize

 SIGNAMES = set(sig for sig in dir(signal)
-                        if sig.startswith('SIG') and '_' not in sig)
+               if sig.startswith('SIG') and '_' not in sig)
 SIGMAP = dict((getattr(signal, name), name) for name in SIGNAMES)

 USAGE = """\
@@ -141,7 +141,7 @@ class MultiTool(object):
     retcode = 0  # Final exit code.

     def __init__(self, env=None, fh=None, quiet=False, verbose=False,
-            no_color=False, nosplash=False):
+                 no_color=False, nosplash=False):
         self.fh = fh or sys.stderr
         self.env = env
         self.nosplash = nosplash
@@ -192,8 +192,9 @@ class MultiTool(object):

     def names(self, argv, cmd):
         p = NamespacedOptionParser(argv)
-        self.say('\n'.join(hostname
-                        for hostname, _, _ in multi_args(p, cmd)))
+        self.say('\n'.join(
+            hostname for hostname, _, _ in multi_args(p, cmd)),
+        )

     def get(self, argv, cmd):
         wanted = argv[0]
@@ -206,8 +207,9 @@ class MultiTool(object):
     def show(self, argv, cmd):
         p = NamespacedOptionParser(argv)
         self.note('> Starting nodes...')
-        self.say('\n'.join(' '.join(worker)
-                        for _, worker, _ in multi_args(p, cmd)))
+        self.say('\n'.join(
+            ' '.join(worker) for _, worker, _ in multi_args(p, cmd)),
+        )

     def start(self, argv, cmd):
         self.splash()
@@ -234,7 +236,7 @@ class MultiTool(object):
             if exc.errno != errno.ESRCH:
                 raise
             self.note('Could not signal %s (%s): No such process' % (
-                        nodename, pid))
+                nodename, pid))
             return False
         return True

@@ -248,7 +250,7 @@ class MultiTool(object):
         return True

     def shutdown_nodes(self, nodes, sig=signal.SIGTERM, retry=None,
-            callback=None):
+                       callback=None):
         if not nodes:
             return
         P = set(nodes)
@@ -433,7 +435,7 @@ def multi_args(p, cmd='celeryd', append='', prefix='', suffix=''):
     cmd = options.pop('--cmd', cmd)
     append = options.pop('--append', append)
     hostname = options.pop('--hostname',
-                   options.pop('-n', socket.gethostname()))
+                           options.pop('-n', socket.gethostname()))
     prefix = options.pop('--prefix', prefix) or ''
     suffix = options.pop('--suffix', suffix) or '.' + hostname
     if suffix in ('""', "''"):
@@ -451,7 +453,7 @@ def multi_args(p, cmd='celeryd', append='', prefix='', suffix=''):
                                 '%n': name})
         argv = ([expand(cmd)] +
                 [format_opt(opt, expand(value))
-                        for opt, value in p.optmerge(name, options).items()] +
+                 for opt, value in p.optmerge(name, options).items()] +
                 [passthrough])
         if append:
             argv.append(expand(append))

+ 2 - 2
celery/bin/celeryev.py

@@ -78,8 +78,8 @@ class EvCommand(Command):
         return evtop(app=self.app)

     def run_evcam(self, camera, logfile=None, pidfile=None, uid=None,
-            gid=None, umask=None, working_directory=None,
-            detach=False, **kwargs):
+                  gid=None, umask=None, working_directory=None,
+                  detach=False, **kwargs):
         from celery.events.snapshot import evcam
         workdir = working_directory
         self.set_process_status('cam')

+ 39 - 25
celery/canvas.py

@@ -82,7 +82,7 @@ class Signature(dict):
         return Signature(d)

     def __init__(self, task=None, args=None, kwargs=None, options=None,
-                type=None, subtask_type=None, immutable=False, **ex):
+                 type=None, subtask_type=None, immutable=False, **ex):
         init = dict.__init__

         if isinstance(task, dict):
@@ -96,11 +96,12 @@ class Signature(dict):
         else:
             self._type = task

-        init(self, task=task_name, args=tuple(args or ()),
-                                   kwargs=kwargs or {},
-                                   options=dict(options or {}, **ex),
-                                   subtask_type=subtask_type,
-                                   immutable=immutable)
+        init(self,
+             task=task_name, args=tuple(args or ()),
+             kwargs=kwargs or {},
+             options=dict(options or {}, **ex),
+             subtask_type=subtask_type,
+             immutable=immutable)

     def __call__(self, *partial_args, **partial_kwargs):
         return self.apply_async(partial_args, partial_kwargs)
@@ -172,9 +173,11 @@ class Signature(dict):
         return self.append_to_list_option('link_error', errback)

     def flatten_links(self):
-        return list(chain_from_iterable(_chain([[self]],
-                (link.flatten_links()
-                    for link in maybe_list(self.options.get('link')) or []))))
+        return list(chain_from_iterable(_chain(
+            [[self]],
+            (link.flatten_links()
+                for link in maybe_list(self.options.get('link')) or [])
+        )))

     def __or__(self, other):
         if not isinstance(self, chain) and isinstance(other, chain):
@@ -231,8 +234,9 @@ class chain(Signature):

     def __init__(self, *tasks, **options):
         tasks = tasks[0] if len(tasks) == 1 and is_list(tasks[0]) else tasks
-        Signature.__init__(self,
-            'celery.chain', (), {'tasks': tasks}, **options)
+        Signature.__init__(
+            self, 'celery.chain', (), {'tasks': tasks}, **options
+        )
         self.tasks = tasks
         self.subtask_type = 'chain'

@@ -257,14 +261,17 @@ class _basemap(Signature):
     _unpack_args = itemgetter('task', 'it')

     def __init__(self, task, it, **options):
-        Signature.__init__(self, self._task_name, (),
-                {'task': task, 'it': regen(it)}, immutable=True, **options)
+        Signature.__init__(
+            self, self._task_name, (),
+            {'task': task, 'it': regen(it)}, immutable=True, **options
+        )

     def apply_async(self, args=(), kwargs={}, **opts):
         # need to evaluate generators
         task, it = self._unpack_args(self.kwargs)
-        return self.type.apply_async((),
-                {'task': task, 'it': list(it)}, **opts)
+        return self.type.apply_async(
+            (), {'task': task, 'it': list(it)}, **opts
+        )

     @classmethod
     def from_dict(self, d):
@@ -293,9 +300,11 @@ class chunks(Signature):
     _unpack_args = itemgetter('task', 'it', 'n')

     def __init__(self, task, it, n, **options):
-        Signature.__init__(self, 'celery.chunks', (),
-                {'task': task, 'it': regen(it), 'n': n},
-                immutable=True, **options)
+        Signature.__init__(
+            self, 'celery.chunks', (),
+            {'task': task, 'it': regen(it), 'n': n},
+            immutable=True, **options
+        )

     @classmethod
     def from_dict(self, d):
@@ -331,8 +340,9 @@ class group(Signature):
     def __init__(self, *tasks, **options):
         if len(tasks) == 1:
             tasks = _maybe_group(tasks[0])
-        Signature.__init__(self,
-            'celery.group', (), {'tasks': tasks}, **options)
+        Signature.__init__(
+            self, 'celery.group', (), {'tasks': tasks}, **options
+        )
         self.tasks, self.subtask_type = tasks, 'group'

     @classmethod
@@ -345,8 +355,9 @@ class group(Signature):
         return group(tasks, **kwdict(d['options']))

     def __call__(self, *partial_args, **options):
-        tasks, result, gid, args = self.type.prepare(options,
-                    map(Signature.clone, self.tasks), partial_args)
+        tasks, result, gid, args = self.type.prepare(
+            options, map(Signature.clone, self.tasks), partial_args,
+        )
         return self.type(tasks, result, gid, args)

     def _freeze(self, _id=None):
@@ -381,9 +392,12 @@ class chord(Signature):
     Chord = Chord

     def __init__(self, header, body=None, task='celery.chord',
-            args=(), kwargs={}, **options):
-        Signature.__init__(self, task, args, dict(kwargs,
-            header=_maybe_group(header), body=maybe_subtask(body)), **options)
+                 args=(), kwargs={}, **options):
+        Signature.__init__(
+            self, task, args,
+            dict(kwargs, header=_maybe_group(header),
+                 body=maybe_subtask(body)), **options
+        )
         self.subtask_type = 'chord'

     @classmethod

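The canvas.py hunks show the third pattern used throughout this commit: when aligning arguments under the opening parenthesis would push them past the line limit, the call is broken directly after the parenthesis so the continuation becomes a plain hanging indent, with the closing parenthesis on its own line. Sketched with the chain constructor from the hunk above:

    # Breaking right after '(' turns a visual indent into a hanging
    # one, keeping long argument lists inside the line limit.
    Signature.__init__(
        self, 'celery.chain', (), {'tasks': tasks}, **options
    )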
+ 5 - 5
celery/concurrency/base.py

@@ -21,7 +21,7 @@ logger = get_logger('celery.concurrency')


 def apply_target(target, args=(), kwargs={}, callback=None,
-        accept_callback=None, pid=None, **_):
+                 accept_callback=None, pid=None, **_):
     if accept_callback:
         accept_callback(pid or os.getpid(), time.time())
     callback(target(*args, **kwargs))
@@ -56,8 +56,8 @@ class BasePool(object):
     #: only used by multiprocessing pool
     uses_semaphore = False

-    def __init__(self, limit=None, putlocks=True, forking_enable=True,
-            **options):
+    def __init__(self, limit=None, putlocks=True,
+                 forking_enable=True, **options):
         self.limit = limit
         self.putlocks = putlocks
         self.options = options
@@ -93,11 +93,11 @@ class BasePool(object):

     def terminate_job(self, pid):
         raise NotImplementedError(
-                '%s does not implement kill_job' % (self.__class__, ))
+            '%s does not implement kill_job' % (self.__class__, ))

     def restart(self):
         raise NotImplementedError(
-                '%s does not implement restart' % (self.__class__, ))
+            '%s does not implement restart' % (self.__class__, ))

     def stop(self):
         self.on_stop()

+ 4 - 3
celery/concurrency/eventlet.py

@@ -144,9 +144,10 @@ class TaskPool(base.BasePool):
         signals.eventlet_pool_postshutdown.send(sender=self)

     def on_apply(self, target, args=None, kwargs=None, callback=None,
-            accept_callback=None, **_):
-        self._quick_apply_sig(sender=self,
-                target=target, args=args, kwargs=kwargs)
+                 accept_callback=None, **_):
+        self._quick_apply_sig(
+            sender=self, target=target, args=args, kwargs=kwargs,
+        )
         self._quick_put(apply_target, target, args, kwargs,
                         callback, accept_callback,
                         self.getpid)

+ 2 - 1
celery/concurrency/gevent.py

@@ -128,7 +128,8 @@ class TaskPool(BasePool):
             self._pool.join()

     def on_apply(self, target, args=None, kwargs=None, callback=None,
-            accept_callback=None, timeout=None, timeout_callback=None, **_):
+                 accept_callback=None, timeout=None,
+                 timeout_callback=None, **_):
         timeout = self.timeout if timeout is None else timeout
         return self._quick_put(apply_timeout if timeout else apply_target,
                                target, args, kwargs, callback, accept_callback,

+ 2 - 2
celery/concurrency/threads.py

@@ -26,7 +26,7 @@ class TaskPool(BasePool):
             import threadpool
         except ImportError:
             raise ImportError(
-                    'The threaded pool requires the threadpool module.')
+                'The threaded pool requires the threadpool module.')
         self.WorkRequest = threadpool.WorkRequest
         self.ThreadPool = threadpool.ThreadPool
         super(TaskPool, self).__init__(*args, **kwargs)
@@ -43,7 +43,7 @@ class TaskPool(BasePool):
         self._pool.dismissWorkers(self.limit, do_join=True)

     def on_apply(self, target, args=None, kwargs=None, callback=None,
-            accept_callback=None, **_):
+                 accept_callback=None, **_):
         req = self.WorkRequest(apply_target, (target, args, kwargs, callback,
                                               accept_callback))
         self._quick_put(req)

+ 7 - 5
celery/contrib/batches.py

@@ -211,7 +211,7 @@ class Batches(Task):

     def flush(self, requests):
         return self.apply_buffer(requests, ([SimpleRequest.from_request(r)
-                                                for r in requests], ))
+                                             for r in requests], ))

     def _do_flush(self):
         logger.debug('Batches: Wake-up to flush buffer...')
@@ -237,7 +237,9 @@ class Batches(Task):
         def on_return(result):
             [req.acknowledge() for req in acks_late[True]]

-        return self._pool.apply_async(apply_batches_task,
-                    (self, args, 0, None),
-                    accept_callback=on_accepted,
-                    callback=acks_late[True] and on_return or noop)
+        return self._pool.apply_async(
+            apply_batches_task,
+            (self, args, 0, None),
+            accept_callback=on_accepted,
+            callback=acks_late[True] and on_return or noop,
+        )

+ 32 - 16
celery/contrib/bundles.py

@@ -21,29 +21,45 @@ django_celery = Dist('django-celery', VERSION, **defaults)
 flask_celery = Dist('Flask-Celery', VERSION, **defaults)

 bundles = [
-    celery.Bundle('celery-with-redis',
+    celery.Bundle(
+        'celery-with-redis',
         'Bundle installing the dependencies for Celery and Redis',
-        requires=['redis>=2.4.4']),
-    celery.Bundle('celery-with-mongodb',
+        requires=['redis>=2.4.4'],
+    ),
+    celery.Bundle(
+        'celery-with-mongodb',
         'Bundle installing the dependencies for Celery and MongoDB',
-        requires=['pymongo']),
-    celery.Bundle('celery-with-couchdb',
+        requires=['pymongo'],
+    ),
+    celery.Bundle(
+        'celery-with-couchdb',
         'Bundle installing the dependencies for Celery and CouchDB',
-        requires=['couchdb']),
-    celery.Bundle('celery-with-beanstalk',
+        requires=['couchdb'],
+    ),
+    celery.Bundle(
+        'celery-with-beanstalk',
         'Bundle installing the dependencies for Celery and Beanstalk',
-        requires=['beanstalkc']),
+        requires=['beanstalkc'],
+    ),

-    django_celery.Bundle('django-celery-with-redis',
+    django_celery.Bundle(
+        'django-celery-with-redis',
         'Bundle installing the dependencies for Django-Celery and Redis',
-        requires=['redis>=2.4.4']),
-    django_celery.Bundle('django-celery-with-mongodb',
+        requires=['redis>=2.4.4'],
+    ),
+    django_celery.Bundle(
+        'django-celery-with-mongodb',
         'Bundle installing the dependencies for Django-Celery and MongoDB',
-        requires=['pymongo']),
-    django_celery.Bundle('django-celery-with-couchdb',
+        requires=['pymongo'],
+    ),
+    django_celery.Bundle(
+        'django-celery-with-couchdb',
         'Bundle installing the dependencies for Django-Celery and CouchDB',
-        requires=['couchdb']),
-    django_celery.Bundle('django-celery-with-beanstalk',
+        requires=['couchdb'],
+    ),
+    django_celery.Bundle(
+        'django-celery-with-beanstalk',
         'Bundle installing the dependencies for Django-Celery and Beanstalk',
-        requires=['beanstalkc']),
+        requires=['beanstalkc'],
+    ),
 ]

+ 13 - 13
celery/contrib/migrate.py

@@ -45,10 +45,10 @@ class State(object):


 def republish(producer, message, exchange=None, routing_key=None,
-        remove_props=['application_headers',
-                      'content_type',
-                      'content_encoding',
-                      'headers']):
+              remove_props=['application_headers',
+                            'content_type',
+                            'content_encoding',
+                            'headers']):
     body = ensure_bytes(message.body)  # use raw message body.
     info, headers, props = (message.delivery_info,
                             message.headers, message.properties)
@@ -87,7 +87,7 @@ def filter_callback(callback, tasks):


 def migrate_tasks(source, dest, migrate=migrate_task, app=None,
-        queues=None, **kwargs):
+                  queues=None, **kwargs):
     app = app_or_default(app)
     queues = prepare_queues(queues)
     producer = app.amqp.TaskProducer(dest)
@@ -114,8 +114,8 @@ def _maybe_queue(app, q):


 def move(predicate, connection=None, exchange=None, routing_key=None,
-        source=None, app=None, callback=None, limit=None, transform=None,
-        **kwargs):
+         source=None, app=None, callback=None, limit=None, transform=None,
+         **kwargs):
     """Find tasks by filtering them and move the tasks to a new queue.

     :param predicate: Filter function used to decide which messages
@@ -191,7 +191,7 @@ def move(predicate, connection=None, exchange=None, routing_key=None,
                 else:
                     ex, rk = expand_dest(ret, exchange, routing_key)
                 republish(producer, message,
-                        exchange=ex, routing_key=rk)
+                          exchange=ex, routing_key=rk)
                 message.ack()

                 state.filtered += 1
@@ -224,16 +224,16 @@ def prepare_queues(queues):
         queues = queues.split(',')
     if isinstance(queues, list):
         queues = dict(tuple(islice(cycle(q.split(':')), None, 2))
-                        for q in queues)
+                      for q in queues)
     if queues is None:
         queues = {}
     return queues


 def start_filter(app, conn, filter, limit=None, timeout=1.0,
-        ack_messages=False, tasks=None, queues=None,
-        callback=None, forever=False, on_declare_queue=None,
-        consume_from=None, state=None, **kwargs):
+                 ack_messages=False, tasks=None, queues=None,
+                 callback=None, forever=False, on_declare_queue=None,
+                 consume_from=None, state=None, **kwargs):
     state = state or State()
     queues = prepare_queues(queues)
     if isinstance(tasks, basestring):
@@ -352,4 +352,4 @@ move_direct_by_taskmap = partial(move_by_taskmap, transform=worker_direct)

 def filter_status(state, body, message):
     print('Moving task %s/%s: %s[%s]' % (
-            state.filtered, state.strtotal, body['task'], body['id']))
+        state.filtered, state.strtotal, body['task'], body['id']))

+ 5 - 4
celery/contrib/rdb.py

@@ -65,14 +65,15 @@ class Rdb(Pdb):
     _sock = None

     def __init__(self, host=CELERY_RDB_HOST, port=CELERY_RDB_PORT,
-            port_search_limit=100, port_skew=+0, out=sys.stdout):
+                 port_search_limit=100, port_skew=+0, out=sys.stdout):
         self.active = True
         self.out = out

         self._prev_handles = sys.stdin, sys.stdout

-        self._sock, this_port = self.get_avail_port(host, port,
-            port_search_limit, port_skew)
+        self._sock, this_port = self.get_avail_port(
+            host, port, port_search_limit, port_skew,
+        )
         self._sock.listen(1)
         me = '%s:%s' % (self.me, this_port)
         context = self.context = {'me': me, 'host': host, 'port': this_port}
@@ -85,7 +86,7 @@ class Rdb(Pdb):
         self.say('%(me)s: In session with %(remote_addr)s' % context)
         self._handle = sys.stdin = sys.stdout = self._client.makefile('rw')
         Pdb.__init__(self, completekey='tab',
-                           stdin=self._handle, stdout=self._handle)
+                     stdin=self._handle, stdout=self._handle)

     def get_avail_port(self, host, port, search_limit=100, skew=+0):
         try:

+ 2 - 2
celery/datastructures.py

@@ -65,8 +65,8 @@ class DependencyGraph(object):
         components = self._tarjan72()

         NC = dict((node, component)
-                    for component in components
-                        for node in component)
+                  for component in components
+                  for node in component)
         for component in components:
             graph.add_arc(component)
         for node in self:

+ 5 - 5
celery/events/__init__.py

@@ -71,8 +71,8 @@ class EventDispatcher(object):
     """

     def __init__(self, connection=None, hostname=None, enabled=True,
-            channel=None, buffer_while_offline=True, app=None,
-            serializer=None):
+                 channel=None, buffer_while_offline=True, app=None,
+                 serializer=None):
         self.app = app_or_default(app or self.app)
         self.connection = connection
         self.channel = channel
@@ -128,7 +128,7 @@ class EventDispatcher(object):
         if self.enabled:
             with self.mutex:
                 event = Event(type, hostname=self.hostname,
-                                    clock=self.app.clock.forward(), **fields)
+                              clock=self.app.clock.forward(), **fields)
                 try:
                     self.publisher.publish(event,
                                            routing_key=type.replace('-', '.'))
@@ -168,7 +168,7 @@ class EventReceiver(object):
     handlers = {}

     def __init__(self, connection, handlers=None, routing_key='#',
-            node_id=None, app=None, queue_prefix='celeryev'):
+                 node_id=None, app=None, queue_prefix='celeryev'):
         self.app = app_or_default(app)
         self.connection = connection
         if handlers is not None:
@@ -255,7 +255,7 @@ class Events(object):

     @contextmanager
     def default_dispatcher(self, hostname=None, enabled=True,
-            buffer_while_offline=False):
+                           buffer_while_offline=False):
         with self.app.amqp.producer_pool.acquire(block=True) as pub:
             with self.Dispatcher(pub.connection, hostname, enabled,
                                  pub.channel, buffer_while_offline) as d:

+ 43 - 32
celery/events/cursesmon.py

@@ -193,8 +193,10 @@ class CursesMonitor(object):  # pragma: no cover
         def callback(my, mx, xs):
             y = count(xs).next
             if not reply:
-                self.win.addstr(y(), 3, 'No replies received in 1s deadline.',
-                        curses.A_BOLD + curses.color_pair(2))
+                self.win.addstr(
+                    y(), 3, 'No replies received in 1s deadline.',
+                    curses.A_BOLD + curses.color_pair(2),
+                )
                 return

             for subreply in reply:
@@ -260,9 +262,10 @@ class CursesMonitor(object):  # pragma: no cover
                 self.win.addstr(curline, 3, keys, curses.A_BOLD)
                 wrapped = wrap(value, mx - 2)
                 if len(wrapped) == 1:
-                    self.win.addstr(curline, len(keys) + 3,
-                            abbr(wrapped[0],
-                                 self.screen_width - (len(keys) + 3)))
+                    self.win.addstr(
+                        curline, len(keys) + 3,
+                        abbr(wrapped[0],
+                             self.screen_width - (len(keys) + 3)))
                 else:
                     for subline in wrapped:
                         nexty = y()
@@ -270,12 +273,15 @@ class CursesMonitor(object):  # pragma: no cover
                             subline = ' ' * 4 + '[...]'
                         elif nexty >= my:
                             break
-                        self.win.addstr(nexty, 3,
-                                abbr(' ' * 4 + subline, self.screen_width - 4),
-                                curses.A_NORMAL)
+                        self.win.addstr(
+                            nexty, 3,
+                            abbr(' ' * 4 + subline, self.screen_width - 4),
+                            curses.A_NORMAL,
+                        )

-        return self.alert(alert_callback,
-                'Task details for %s' % self.selected_task)
+        return self.alert(
+            alert_callback, 'Task details for %s' % self.selected_task,
+        )

     def selection_traceback(self):
         if not self.selected_task:
@@ -289,8 +295,10 @@ class CursesMonitor(object):  # pragma: no cover
             for line in task.traceback.split('\n'):
                 self.win.addstr(y(), 3, line)

-        return self.alert(alert_callback,
-                'Task Exception Traceback for %s' % self.selected_task)
+        return self.alert(
+            alert_callback,
+            'Task Exception Traceback for %s' % self.selected_task,
+        )

     def selection_result(self):
         if not self.selected_task:
@@ -299,13 +307,14 @@ class CursesMonitor(object):  # pragma: no cover
         def alert_callback(my, mx, xs):
             y = count(xs).next
             task = self.state.tasks[self.selected_task]
-            result = getattr(task, 'result', None) or getattr(task,
-                    'exception', None)
+            result = (getattr(task, 'result', None)
+                      or getattr(task, 'exception', None))
             for line in wrap(result, mx - 2):
                 self.win.addstr(y(), 3, line)

-        return self.alert(alert_callback,
-                'Task Result for %s' % self.selected_task)
+        return self.alert(
+            alert_callback, 'Task Result for %s' % self.selected_task,
+        )

     def display_task_row(self, lineno, task):
         state_color = self.state_colors.get(task.state)
@@ -313,7 +322,8 @@ class CursesMonitor(object):  # pragma: no cover
         if task.uuid == self.selected_task:
         if task.uuid == self.selected_task:
             attr = curses.A_STANDOUT
             attr = curses.A_STANDOUT
         timestamp = datetime.utcfromtimestamp(
         timestamp = datetime.utcfromtimestamp(
-                        task.timestamp or time.time())
+            task.timestamp or time.time(),
+        )
         timef = timestamp.strftime('%H:%M:%S')
         timef = timestamp.strftime('%H:%M:%S')
         hostname = task.worker.hostname if task.worker else '*NONE*'
         hostname = task.worker.hostname if task.worker else '*NONE*'
         line = self.format_row(task.uuid, task.name,
         line = self.format_row(task.uuid, task.name,
@@ -339,7 +349,7 @@ class CursesMonitor(object):  # pragma: no cover
         blank_line()
         blank_line()
         win.addstr(y(), x, self.format_row('UUID', 'TASK',
         win.addstr(y(), x, self.format_row('UUID', 'TASK',
                                            'WORKER', 'TIME', 'STATE'),
                                            'WORKER', 'TIME', 'STATE'),
-                curses.A_BOLD | curses.A_UNDERLINE)
+                   curses.A_BOLD | curses.A_UNDERLINE)
         tasks = self.tasks
         tasks = self.tasks
         if tasks:
         if tasks:
             for row, (uuid, task) in enumerate(tasks):
             for row, (uuid, task) in enumerate(tasks):
@@ -369,8 +379,8 @@ class CursesMonitor(object):  # pragma: no cover
                     info['runtime'] = '%.2fs' % info['runtime']
                     info['runtime'] = '%.2fs' % info['runtime']
                 if 'result' in info:
                 if 'result' in info:
                     info['result'] = abbr(info['result'], 16)
                     info['result'] = abbr(info['result'], 16)
-                info = ' '.join('%s=%s' % (key, value)
-                            for key, value in info.items())
+                info = ' '.join(
+                    '%s=%s' % (key, value) for key, value in info.items())
                 detail = '... -> key i'
                 detail = '... -> key i'
             infowin = abbr(info,
             infowin = abbr(info,
                            self.screen_width - len(self.selected_str) - 2,
                            self.screen_width - len(self.selected_str) - 2,
@@ -380,7 +390,7 @@ class CursesMonitor(object):  # pragma: no cover
             if detail in infowin:
             if detail in infowin:
                 detailpos = len(infowin) - len(detail)
                 detailpos = len(infowin) - len(detail)
                 win.addstr(my - 5, x + len(self.selected_str) + detailpos,
                 win.addstr(my - 5, x + len(self.selected_str) + detailpos,
-                        detail, curses.A_BOLD)
+                           detail, curses.A_BOLD)
         else:
         else:
             win.addstr(my - 5, x, 'No task selected', curses.A_NORMAL)
             win.addstr(my - 5, x, 'No task selected', curses.A_NORMAL)
 
 
@@ -388,19 +398,21 @@ class CursesMonitor(object):  # pragma: no cover
         if self.workers:
         if self.workers:
             win.addstr(my - 4, x, self.online_str, curses.A_BOLD)
             win.addstr(my - 4, x, self.online_str, curses.A_BOLD)
             win.addstr(my - 4, x + len(self.online_str),
             win.addstr(my - 4, x + len(self.online_str),
-                    ', '.join(sorted(self.workers)), curses.A_NORMAL)
+                       ', '.join(sorted(self.workers)), curses.A_NORMAL)
         else:
         else:
             win.addstr(my - 4, x, 'No workers discovered.')
             win.addstr(my - 4, x, 'No workers discovered.')
 
 
         # Info
         # Info
         win.addstr(my - 3, x, self.info_str, curses.A_BOLD)
         win.addstr(my - 3, x, self.info_str, curses.A_BOLD)
-        win.addstr(my - 3, x + len(self.info_str),
-                'events:%s tasks:%s workers:%s/%s' % (
-                    self.state.event_count, self.state.task_count,
-                    len([w for w in self.state.workers.values()
-                            if w.alive]),
-                    len(self.state.workers)),
-                curses.A_DIM)
+        win.addstr(
+            my - 3, x + len(self.info_str),
+            'events:%s tasks:%s workers:%s/%s' % (
+                self.state.event_count, self.state.task_count,
+                len([w for w in self.state.workers.values()
+                     if w.alive]),
+                len(self.state.workers)),
+            curses.A_DIM,
+        )
 
 
         # Help
         # Help
         self.safe_add_str(my - 2, x, self.help_title, curses.A_BOLD)
         self.safe_add_str(my - 2, x, self.help_title, curses.A_BOLD)
@@ -453,9 +465,8 @@ class CursesMonitor(object):  # pragma: no cover
 
 
     @property
     @property
     def workers(self):
     def workers(self):
-        return [hostname
-                    for hostname, w in self.state.workers.items()
-                        if w.alive]
+        return [hostname for hostname, w in self.state.workers.items()
+                if w.alive]
 
 
 
 
 class DisplayThread(threading.Thread):  # pragma: no cover
 class DisplayThread(threading.Thread):  # pragma: no cover
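
Note: the cursesmon.py hunks above are representative of the whole commit. pycodestyle's E126 (continuation line over-indented for hanging indent), E127 (continuation line over-indented for visual indent) and E128 (continuation line under-indented for visual indent) all flag wrapped lines that neither align with the opening bracket nor use a clean hanging indent. A minimal runnable sketch of the two accepted layouts, with illustrative names:

    def addstr(y, x, text, attr=0):
        return (y, x, text, attr)

    nexty = 5

    # Flagged as E128 before this commit (continuation under-indented
    # relative to the opening parenthesis):
    #     addstr(nexty, 3,
    #             'text', 0)

    # Fix 1: visual indent, aligned exactly with the opening parenthesis.
    addstr(nexty, 3,
           'text', 0)

    # Fix 2: hanging indent, closing parenthesis on its own line.
    addstr(
        nexty, 3,
        'text', 0,
    )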

+ 15 - 16
celery/events/dumper.py

@@ -43,33 +43,32 @@ class Dumper(object):
     def say(self, msg):
         say(msg, out=self.out)
 
-    def on_event(self, event):
-        timestamp = datetime.utcfromtimestamp(event.pop('timestamp'))
-        type = event.pop('type').lower()
-        hostname = event.pop('hostname')
+    def on_event(self, ev):
+        timestamp = datetime.utcfromtimestamp(ev.pop('timestamp'))
+        type = ev.pop('type').lower()
+        hostname = ev.pop('hostname')
         if type.startswith('task-'):
-            uuid = event.pop('uuid')
+            uuid = ev.pop('uuid')
             if type in ('task-received', 'task-sent'):
                 task = TASK_NAMES[uuid] = '%s(%s) args=%s kwargs=%s' % (
-                        event.pop('name'), uuid,
-                        event.pop('args'),
-                        event.pop('kwargs'))
+                    ev.pop('name'), uuid,
+                    ev.pop('args'),
+                    ev.pop('kwargs'))
             else:
                 task = TASK_NAMES.get(uuid, '')
             return self.format_task_event(hostname, timestamp,
-                                          type, task, event)
-        fields = ', '.join('%s=%s' % (key, event[key])
-                        for key in sorted(event))
+                                          type, task, ev)
+        fields = ', '.join('%s=%s' % (key, ev[key]) for key in sorted(ev))
         sep = fields and ':' or ''
         self.say('%s [%s] %s%s %s' % (hostname, timestamp,
                                       humanize_type(type), sep, fields))
 
-    def format_task_event(self, hostname, timestamp, type, task, event):
-        fields = ', '.join('%s=%s' % (key, event[key])
-                        for key in sorted(event))
+    def format_task_event(self, hostname, timestamp, type, task, ev):
+        fields = ', '.join('%s=%s' % (key, ev[key]) for key in sorted(ev))
         sep = fields and ':' or ''
-        self.say('%s [%s] %s%s %s %s' % (hostname, timestamp,
-                    humanize_type(type), sep, task, fields))
+        self.say('%s [%s] %s%s %s %s' % (
+            hostname, timestamp, humanize_type(type), sep, task, fields,
+        ))
 
 
 def evdump(app=None, out=sys.stdout):
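
The dumper.py hunk also renames the `event` argument to `ev`, presumably so the re-wrapped `fields = ...` expressions fit on a single line. Its new `self.say(...)` call shows the trailing-comma-plus-own-line-paren layout on a formatted string; a self-contained sketch with illustrative values:

    def say(msg):
        print(msg)

    hostname, timestamp = 'worker1', '2012-01-01 12:00:00'
    say('%s [%s]' % (
        hostname, timestamp,
    ))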

+ 2 - 2
celery/events/snapshot.py

@@ -35,7 +35,7 @@ class Polaroid(object):
     _ctref = None
 
     def __init__(self, state, freq=1.0, maxrate=None,
-            cleanup_freq=3600.0, timer=None, app=None):
+                 cleanup_freq=3600.0, timer=None, app=None):
         self.app = app_or_default(app)
         self.state = state
         self.freq = freq
@@ -86,7 +86,7 @@ class Polaroid(object):
 
 
 def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
-        logfile=None, pidfile=None, timer=None, app=None):
+          logfile=None, pidfile=None, timer=None, app=None):
     app = app_or_default(app)
 
     if pidfile:
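
Both snapshot.py hunks are the same E128 fix applied to `def` signatures: wrapped parameters are re-aligned under the first parameter after the opening parenthesis. A runnable sketch (the signature comes from the hunk above; the body is a stub for illustration):

    def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
              logfile=None, pidfile=None, timer=None, app=None):
        return (camera, freq, logfile)

    print(evcam('cam', freq=2.0))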

+ 12 - 11
celery/events/state.py

@@ -36,7 +36,7 @@ HEARTBEAT_EXPIRE_WINDOW = 200
 
 
 def heartbeat_expires(timestamp, freq=60,
-        expire_window=HEARTBEAT_EXPIRE_WINDOW):
+                      expire_window=HEARTBEAT_EXPIRE_WINDOW):
     return timestamp + freq * (expire_window / 1e2)
 
 
@@ -210,7 +210,7 @@ class State(object):
     task_count = 0
 
     def __init__(self, callback=None,
-            max_workers_in_memory=5000, max_tasks_in_memory=10000):
+                 max_workers_in_memory=5000, max_tasks_in_memory=10000):
        self.workers = LRUCache(limit=max_workers_in_memory)
        self.tasks = LRUCache(limit=max_tasks_in_memory)
        self.event_callback = callback
@@ -233,8 +233,9 @@
 
     def _clear_tasks(self, ready=True):
         if ready:
-            in_progress = dict((uuid, task) for uuid, task in self.itertasks()
-                                if task.state not in states.READY_STATES)
+            in_progress = dict(
+                (uuid, task) for uuid, task in self.itertasks()
+                if task.state not in states.READY_STATES)
             self.tasks.clear()
             self.tasks.update(in_progress)
         else:
@@ -257,7 +258,7 @@
             worker.update(kwargs)
         except KeyError:
             worker = self.workers[hostname] = Worker(
-                    hostname=hostname, **kwargs)
+                hostname=hostname, **kwargs)
         return worker
 
     def get_or_create_task(self, uuid):
@@ -329,9 +330,9 @@
         Returns a list of `(uuid, task)` tuples.
 
         """
-        sorted_tasks = self._sort_tasks_by_time((uuid, task)
-                for uuid, task in self.tasks.iteritems()
-                    if task.name == name)
+        sorted_tasks = self._sort_tasks_by_time(
+            (uuid, task) for uuid, task in self.tasks.iteritems()
+            if task.name == name)
 
         return sorted_tasks[0:limit or None]
 
@@ -341,9 +342,9 @@
         Returns a list of `(uuid, task)` tuples.
 
         """
-        return self._sort_tasks_by_time((uuid, task)
-                for uuid, task in self.itertasks(limit)
-                    if task.worker.hostname == hostname)
+        return self._sort_tasks_by_time(
+            (uuid, task) for uuid, task in self.itertasks(limit)
+            if task.worker.hostname == hostname)
 
     def task_types(self):
         """Returns a list of all seen task types."""

+ 1 - 1
celery/loaders/__init__.py

@@ -24,7 +24,7 @@ def get_loader_cls(loader):
 
 
 @deprecated(deprecation='2.5', removal='4.0',
-        alternative='celery.current_app.loader')
+            alternative='celery.current_app.loader')
 def current_loader():
     return current_app.loader
 

+ 24 - 22
celery/loaders/base.py

@@ -111,15 +111,19 @@ class BaseLoader(object):
         return importlib.import_module(module, package=package)
 
     def import_from_cwd(self, module, imp=None, package=None):
-        return import_from_cwd(module,
-                self.import_module if imp is None else imp,
-                package=package)
+        return import_from_cwd(
+            module,
+            self.import_module if imp is None else imp,
+            package=package,
+        )
 
     def import_default_modules(self):
-        return [self.import_task_module(m)
-            for m in set(maybe_list(self.app.conf.CELERY_IMPORTS))
-                   | set(maybe_list(self.app.conf.CELERY_INCLUDE))
-                   | self.builtin_modules]
+        return [
+            self.import_task_module(m) for m in (
+                set(maybe_list(self.app.conf.CELERY_IMPORTS))
+                | set(maybe_list(self.app.conf.CELERY_INCLUDE))
+                | self.builtin_modules)
+        ]
 
     def init_worker(self):
         if not self.worker_initialized:
@@ -162,25 +166,23 @@ class BaseLoader(object):
             self.find_module(name)
         except NotAPackage:
             if name.endswith('.py'):
-                raise NotAPackage, NotAPackage(
-                        CONFIG_WITH_SUFFIX % {
-                            'module': name,
-                            'suggest': name[:-3]}), sys.exc_info()[2]
+                raise NotAPackage, NotAPackage(CONFIG_WITH_SUFFIX % {
+                    'module': name, 'suggest': name[:-3]}), sys.exc_info()[2]
             raise NotAPackage, NotAPackage(
-                    CONFIG_INVALID_NAME % {
-                        'module': name}), sys.exc_info()[2]
+                CONFIG_INVALID_NAME % {'module': name}), sys.exc_info()[2]
         else:
             return self.import_from_cwd(name)
 
     def find_module(self, module):
         return find_module(module)
 
-    def cmdline_config_parser(self, args, namespace='celery',
-                re_type=re.compile(r'\((\w+)\)'),
-                extra_types={'json': anyjson.loads},
-                override_types={'tuple': 'json',
-                                'list': 'json',
-                                'dict': 'json'}):
+    def cmdline_config_parser(
+            self, args, namespace='celery',
+            re_type=re.compile(r'\((\w+)\)'),
+            extra_types={'json': anyjson.loads},
+            override_types={'tuple': 'json',
+                            'list': 'json',
+                            'dict': 'json'}):
         from celery.app.defaults import Option, NAMESPACES
         namespace = namespace.upper()
         typemap = dict(Option.typemap, **extra_types)
@@ -222,9 +224,9 @@ class BaseLoader(object):
         return dict(map(getarg, args))
 
     def mail_admins(self, subject, body, fail_silently=False,
-            sender=None, to=None, host=None, port=None,
-            user=None, password=None, timeout=None,
-            use_ssl=False, use_tls=False):
+                    sender=None, to=None, host=None, port=None,
+                    user=None, password=None, timeout=None,
+                    use_ssl=False, use_tls=False):
         message = self.mail.Message(sender=sender, to=to,
                                     subject=safe_str(subject),
                                     body=safe_str(body))
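
The trickiest re-wraps in loaders/base.py involve Python 2's three-argument raise, which re-raises a new exception while preserving the original traceback; the statement as a whole cannot be parenthesized, so only the call arguments can move. A sketch of the pattern (Python 2 syntax; names and message are illustrative):

    import sys

    class NotAPackage(Exception):
        pass

    def find_module(name):
        raise NotAPackage(name)

    try:
        try:
            find_module('celeryconfig.py')
        except NotAPackage:
            # New exception, original traceback kept via the third argument.
            raise NotAPackage, NotAPackage(
                'suggestion: drop the .py suffix'), sys.exc_info()[2]
    except NotAPackage, exc:
        print(exc)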

+ 1 - 1
celery/loaders/default.py

@@ -33,7 +33,7 @@ class Loader(BaseLoader):
         """Read configuration from :file:`celeryconfig.py` and configure
         celery and Django so it can be used by regular Python."""
         configname = os.environ.get('CELERY_CONFIG_MODULE',
-                                     DEFAULT_CONFIG_MODULE)
+                                    DEFAULT_CONFIG_MODULE)
         try:
             usercfg = self._import_config_module(configname)
         except ImportError:

+ 2 - 2
celery/local.py

@@ -17,7 +17,7 @@ import sys
 
 
 def symbol_by_name(name, aliases={}, imp=None, package=None,
-        sep='.', default=None, **kwargs):
+                   sep='.', default=None, **kwargs):
     """Get symbol by qualified name.
 
     The name should be the full dot-separated path to the class::
@@ -67,7 +67,7 @@ def symbol_by_name(name, aliases={}, imp=None, package=None,
             module = imp(module_name, package=package, **kwargs)
         except ValueError, exc:
             raise ValueError, ValueError(
-                    "Couldn't import %r: %s" % (name, exc)), sys.exc_info()[2]
+                "Couldn't import %r: %s" % (name, exc)), sys.exc_info()[2]
         return getattr(module, cls_name) if cls_name else module
     except (ImportError, AttributeError):
         if default is None:

+ 2 - 2
celery/platforms.py

@@ -263,7 +263,7 @@ class DaemonContext(object):
     _is_open = False
 
     def __init__(self, pidfile=None, workdir=None, umask=None,
-            fake=False, after_chdir=None, **kwargs):
+                 fake=False, after_chdir=None, **kwargs):
         self.workdir = workdir or DAEMON_WORKDIR
         self.umask = DAEMON_UMASK if umask is None else umask
         self.fake = fake
@@ -448,7 +448,7 @@ def initgroups(uid, gid):
     if hasattr(os, 'initgroups'):  # Python 2.7+
         return os.initgroups(username, gid)
     groups = [gr.gr_gid for gr in grp.getgrall()
-                            if username in gr.gr_mem]
+              if username in gr.gr_mem]
     setgroups(groups)
 
 

+ 5 - 5
celery/result.py

@@ -62,7 +62,7 @@ class AsyncResult(ResultBase):
     parent = None
 
     def __init__(self, id, backend=None, task_name=None,
-            app=None, parent=None):
+                 app=None, parent=None):
         self.app = app_or_default(app or self.app)
         self.id = id
         self.backend = backend or self.app.backend
@@ -116,8 +116,8 @@
 
         """
         return self.backend.wait_for(self.id, timeout=timeout,
-                                              propagate=propagate,
-                                              interval=interval)
+                                     propagate=propagate,
+                                     interval=interval)
     wait = get  # deprecated alias to :meth:`get`.
 
     def collect(self, intermediate=False, **kwargs):
@@ -421,7 +421,7 @@ class ResultSet(ResultBase):
         """
         elapsed = 0.0
         results = OrderedDict((result.id, copy(result))
-                                for result in self.results)
+                              for result in self.results)
 
         while results:
             removed = set()
@@ -449,7 +449,7 @@
 
         """
         return (self.join_native if self.supports_native_join else self.join)(
-                    timeout=timeout, propagate=propagate, interval=interval)
+            timeout=timeout, propagate=propagate, interval=interval)
 
     def join(self, timeout=None, propagate=True, interval=0.5):
         """Gathers the results of all tasks as a list in order.

+ 37 - 35
celery/schedules.py

@@ -172,10 +172,11 @@ class crontab_parser(object):
         self.max_ = max_
         self.min_ = min_
         self.pats = (
-                (re.compile(self._range + self._steps), self._range_steps),
-                (re.compile(self._range), self._expand_range),
-                (re.compile(self._star + self._steps), self._star_steps),
-                (re.compile('^' + self._star + '$'), self._expand_star))
+            (re.compile(self._range + self._steps), self._range_steps),
+            (re.compile(self._range), self._expand_range),
+            (re.compile(self._star + self._steps), self._star_steps),
+            (re.compile('^' + self._star + '$'), self._expand_star),
+        )
 
     def parse(self, spec):
         acc = set()
@@ -339,17 +340,17 @@ class crontab(schedule):
             result = set(cronspec)
         else:
             raise TypeError(
-                    'Argument cronspec needs to be of any of the '
-                    'following types: int, basestring, or an iterable type. '
-                    "'%s' was given." % type(cronspec))
+                'Argument cronspec needs to be of any of the '
+                'following types: int, basestring, or an iterable type. '
+                "'%s' was given." % type(cronspec))
 
         # assure the result does not preceed the min or exceed the max
         for number in result:
             if number >= max_ + min_ or number < min_:
                 raise ValueError(
-                        'Invalid crontab pattern. Valid '
-                        "range is %d-%d. '%d' was found." %
-                        (min_, max_ - 1 + min_, number))
+                    'Invalid crontab pattern. Valid '
+                    "range is %d-%d. '%d' was found." % (
+                        min_, max_ - 1 + min_, number))
 
         return result
 
@@ -376,9 +377,9 @@ class crontab(schedule):
         def roll_over():
             while 1:
                 flag = (datedata.dom == len(days_of_month) or
-                            day_out_of_range(datedata.year,
-                                             months_of_year[datedata.moy],
-                                             days_of_month[datedata.dom]))
+                        day_out_of_range(datedata.year,
+                                         months_of_year[datedata.moy],
+                                         days_of_month[datedata.dom]))
                 if flag:
                     datedata.dom = 0
                     datedata.moy += 1
@@ -398,11 +399,12 @@ class crontab(schedule):
                 datedata.moy = 0
         roll_over()
 
-        while not datetime(
-                year=datedata.year,
-                month=months_of_year[datedata.moy],
-                day=days_of_month[datedata.dom]) \
-                    .isoweekday() % 7 in self.day_of_week:
+        while 1:
+            th = datetime(year=datedata.year,
+                          month=months_of_year[datedata.moy],
+                          day=days_of_month[datedata.dom])
+            if th.isoweekday() % 7 in self.day_of_week:
+                break
             datedata.dom += 1
             roll_over()
 
@@ -415,7 +417,7 @@
                              microsecond=0)
 
     def __init__(self, minute='*', hour='*', day_of_week='*',
-            day_of_month='*', month_of_year='*', nowfun=None):
+                 day_of_month='*', month_of_year='*', nowfun=None):
         self._orig_minute = minute
         self._orig_hour = hour
         self._orig_day_of_week = day_of_week
@@ -432,12 +434,13 @@
         return (self.nowfun or self.app.now)()
 
     def __repr__(self):
-        return ('<crontab: %s %s %s %s %s (m/h/d/dM/MY)>' %
-                        (_weak_bool(self._orig_minute) or '*',
-                         _weak_bool(self._orig_hour) or '*',
-                         _weak_bool(self._orig_day_of_week) or '*',
-                         _weak_bool(self._orig_day_of_month) or '*',
-                         _weak_bool(self._orig_month_of_year) or '*'))
+        return ('<crontab: %s %s %s %s %s (m/h/d/dM/MY)>' % (
+            _weak_bool(self._orig_minute) or '*',
+            _weak_bool(self._orig_hour) or '*',
+            _weak_bool(self._orig_day_of_week) or '*',
+            _weak_bool(self._orig_day_of_month) or '*',
+            _weak_bool(self._orig_month_of_year) or '*',
+        ))
 
     def __reduce__(self):
         return (self.__class__, (self._orig_minute,
@@ -453,27 +456,27 @@
         dow_num = last_run_at.isoweekday() % 7  # Sunday is day 0, not day 7
 
         execute_this_date = (last_run_at.month in self.month_of_year and
-                                last_run_at.day in self.day_of_month and
-                                    dow_num in self.day_of_week)
+                             last_run_at.day in self.day_of_month and
+                             dow_num in self.day_of_week)
 
         execute_this_hour = (execute_this_date and
-                                last_run_at.hour in self.hour and
-                                    last_run_at.minute < max(self.minute))
+                             last_run_at.hour in self.hour and
+                             last_run_at.minute < max(self.minute))
 
         if execute_this_hour:
             next_minute = min(minute for minute in self.minute
-                                        if minute > last_run_at.minute)
+                              if minute > last_run_at.minute)
             delta = relativedelta(minute=next_minute,
                                   second=0,
                                   microsecond=0)
         else:
             next_minute = min(self.minute)
             execute_today = (execute_this_date and
-                                last_run_at.hour < max(self.hour))
+                             last_run_at.hour < max(self.hour))
 
             if execute_today:
                 next_hour = min(hour for hour in self.hour
-                                        if hour > last_run_at.hour)
+                                if hour > last_run_at.hour)
                 delta = relativedelta(hour=next_hour,
                                       minute=next_minute,
                                       second=0,
@@ -481,11 +484,10 @@
             else:
                 next_hour = min(self.hour)
                 all_dom_moy = (self._orig_day_of_month == '*' and
-                                  self._orig_month_of_year == '*')
+                               self._orig_month_of_year == '*')
                 if all_dom_moy:
                     next_day = min([day for day in self.day_of_week
-                                        if day > dow_num] or
-                                self.day_of_week)
+                                    if day > dow_num] or self.day_of_week)
                     add_week = next_day == dow_num
 
                     delta = relativedelta(weeks=add_week and 1 or 0,
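
One schedules.py hunk goes beyond re-indentation: the multi-line `while not datetime(...).isoweekday() % 7 in ...` condition becomes an infinite loop with an explicit break, which removes the backslash continuation entirely. A sketch of the same transformation on toy data:

    # Before (conceptually): while not is_even(candidates[i]): i += 1,
    # wrapped over several continuation lines.  After, as in the hunk:
    candidates = [1, 4, 6]
    i = 0
    while 1:
        value = candidates[i]
        if value % 2 == 0:
            break
        i += 1
    print(value)  # 4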

+ 1 - 1
celery/security/__init__.py

@@ -40,7 +40,7 @@ def disable_untrusted_serializers(whitelist=None):
 
 
 def setup_security(allowed_serializers=None, key=None, cert=None, store=None,
-        digest='sha1', serializer='json'):
+                   digest='sha1', serializer='json'):
     """Setup the message-signing serializer.
 
     Disables untrusted serializers and if configured to use the ``auth``

+ 7 - 7
celery/security/serialization.py

@@ -30,7 +30,7 @@ def b64decode(s):
 class SecureSerializer(object):
 
     def __init__(self, key=None, cert=None, cert_store=None,
-            digest='sha1', serializer='json'):
+                 digest='sha1', serializer='json'):
         self._key = key
         self._cert = cert
         self._cert_store = cert_store
@@ -43,7 +43,7 @@ class SecureSerializer(object):
         assert self._cert is not None
         with reraise_errors('Unable to serialize: %r', (Exception, )):
             content_type, content_encoding, body = encode(
-                    data, serializer=self._serializer)
+                data, serializer=self._serializer)
             # What we sign is the serialized body, not the body itself.
             # this way the receiver doesn't have to decode the contents
             # to verify the signature (and thus avoiding potential flaws
@@ -62,21 +62,21 @@ class SecureSerializer(object):
                                        payload['body'])
             self._cert_store[signer].verify(body, signature, self._digest)
         return decode(body, payload['content_type'],
-                            payload['content_encoding'], force=True)
+                      payload['content_encoding'], force=True)
 
     def _pack(self, body, content_type, content_encoding, signer, signature,
-            sep='\x00\x01'):
+              sep='\x00\x01'):
         return b64encode(sep.join([signer, signature,
                                    content_type, content_encoding, body]))
 
     def _unpack(self, payload, sep='\x00\x01',
-            fields=('signer', 'signature', 'content_type',
-                    'content_encoding', 'body')):
+                fields=('signer', 'signature', 'content_type',
+                        'content_encoding', 'body')):
         return dict(zip(fields, b64decode(payload).split(sep)))
 
 
 def register_auth(key=None, cert=None, store=None, digest='sha1',
-        serializer='json'):
+                  serializer='json'):
     """register security serializer"""
     s = SecureSerializer(key and PrivateKey(key),
                          cert and Certificate(cert),

+ 2 - 1
celery/task/__init__.py

@@ -38,7 +38,8 @@ class module(MagicModule):
         return self.task(*args, **kwargs)
 
 
-old_module, new_module = recreate_module(__name__,  # pragma: no cover
+old_module, new_module = recreate_module(  # pragma: no cover
+    __name__,
     by_module={
         'celery.task.base': ['BaseTask', 'Task', 'PeriodicTask',
                              'task', 'periodic_task'],

+ 14 - 12
celery/task/base.py

@@ -92,10 +92,10 @@ class Task(BaseTask):
                 ...
         """
         return self._get_app().connection(
-                connect_timeout=connect_timeout)
+            connect_timeout=connect_timeout)
 
     def get_publisher(self, connection=None, exchange=None,
-            connect_timeout=None, exchange_type=None, **options):
+                      connect_timeout=None, exchange_type=None, **options):
         """Deprecated method to get the task publisher (now called producer).
 
         Should be replaced with :class:`@amqp.TaskProducer`:
@@ -111,9 +111,11 @@ class Task(BaseTask):
         if exchange_type is None:
             exchange_type = self.exchange_type
         connection = connection or self.establish_connection(connect_timeout)
-        return self._get_app().amqp.TaskProducer(connection,
-                exchange=exchange and Exchange(exchange, exchange_type),
-                routing_key=self.routing_key, **options)
+        return self._get_app().amqp.TaskProducer(
+            connection,
+            exchange=exchange and Exchange(exchange, exchange_type),
+            routing_key=self.routing_key, **options
+        )
 
     @classmethod
     def get_consumer(self, connection=None, queues=None, **kwargs):
@@ -142,19 +144,19 @@ class PeriodicTask(Task):
     def __init__(self):
         if not hasattr(self, 'run_every'):
             raise NotImplementedError(
-                    'Periodic tasks must have a run_every attribute')
+                'Periodic tasks must have a run_every attribute')
         self.run_every = maybe_schedule(self.run_every, self.relative)
         super(PeriodicTask, self).__init__()
 
     @classmethod
     def on_bound(cls, app):
         app.conf.CELERYBEAT_SCHEDULE[cls.name] = {
-                'task': cls.name,
-                'schedule': cls.run_every,
-                'args': (),
-                'kwargs': {},
-                'options': cls.options or {},
-                'relative': cls.relative,
+            'task': cls.name,
+            'schedule': cls.run_every,
+            'args': (),
+            'kwargs': {},
+            'options': cls.options or {},
+            'relative': cls.relative,
         }
 
 

+ 2 - 2
celery/task/http.py

@@ -58,7 +58,7 @@ else:
         """With a dict's items() tuple return a new dict with any utf-8
         keys/values encoded."""
         return dict((key.encode('utf-8'), maybe_utf8(value))
-                        for key, value in tup)
+                    for key, value in tup)
 
 
 def extract_response(raw_response, loads=anyjson.loads):
@@ -69,7 +69,7 @@ def extract_response(raw_response, loads=anyjson.loads):
         payload = loads(raw_response)
     except ValueError, exc:
         raise InvalidResponseError, InvalidResponseError(
-                str(exc)), sys.exc_info()[2]
+            str(exc)), sys.exc_info()[2]
 
     status = payload['status']
     if status == 'success':

+ 2 - 2
celery/task/sets.py

@@ -39,7 +39,7 @@ class TaskSet(list):
         self.total = len(self)  # XXX compat
 
     def apply_async(self, connection=None, connect_timeout=None,
-            publisher=None, taskset_id=None):
+                    publisher=None, taskset_id=None):
         """Apply TaskSet."""
         app = self.app
 
@@ -59,7 +59,7 @@
 
     def _async_results(self, taskset_id, publisher):
         return [task.apply_async(taskset_id=taskset_id, publisher=publisher)
-                    for task in self]
+                for task in self]
 
     def apply(self, taskset_id=None):
         """Applies the TaskSet locally by blocking until all tasks return."""

+ 2 - 2
celery/task/trace.py

@@ -140,7 +140,7 @@ class TraceInfo(object):
 
 
 def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
-        Info=TraceInfo, eager=False, propagate=False):
+                 Info=TraceInfo, eager=False, propagate=False):
     """Builts a function that tracing the tasks execution; catches all
     exceptions, and saves the state and result of the task execution
     to the result backend.
@@ -315,7 +315,7 @@ def _fast_trace_task(task, uuid, args, kwargs, request={}):
 def eager_trace_task(task, uuid, args, kwargs, request=None, **opts):
     opts.setdefault('eager', True)
     return build_tracer(task.name, task, **opts)(
-            uuid, args, kwargs, request)
+        uuid, args, kwargs, request)
 
 
 def report_internal_error(task, exc):

+ 2 - 2
celery/tests/__init__.py

@@ -46,7 +46,7 @@ def teardown():
     # Make sure there are no remaining threads at shutdown.
     import threading
     remaining_threads = [thread for thread in threading.enumerate()
-                            if thread.getName() != 'MainThread']
+                         if thread.getName() != 'MainThread']
     if remaining_threads:
         sys.stderr.write(
             '\n\n**WARNING**: Remaining threads at teardown: %r...\n' % (
@@ -70,7 +70,7 @@ def find_distribution_modules(name=__name__, file=__file__):
 
 
 def import_all_modules(name=__name__, file=__file__,
-        skip=['celery.decorators', 'celery.contrib.batches']):
+                       skip=['celery.decorators', 'celery.contrib.batches']):
     for module in find_distribution_modules(name, file):
         if module not in skip:
             try:

+ 2 - 2
celery/tests/app/test_amqp.py

@@ -107,8 +107,8 @@ class test_PublisherPool(AppCase):
 class test_Queues(AppCase):
 
     def test_queues_format(self):
-        prev, self.app.amqp.queues._consume_from = \
-                self.app.amqp.queues._consume_from, {}
+        prev, self.app.amqp.queues._consume_from = (
+            self.app.amqp.queues._consume_from, {})
         try:
             self.assertEqual(self.app.amqp.queues.format(), '')
         finally:
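
The test_amqp.py hunk swaps a backslash continuation for parentheses around the right-hand tuple; the same change recurs in test_loaders.py below. A self-contained sketch of the pattern with illustrative data:

    conf = {'CELERY_IMPORTS': ('os', 'sys')}
    prev, conf['CELERY_IMPORTS'] = (
        conf['CELERY_IMPORTS'], ('subprocess', ))
    print(prev, conf['CELERY_IMPORTS'])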

+ 17 - 13
celery/tests/app/test_app.py

@@ -34,8 +34,8 @@ class Object(object):
 
 def _get_test_config():
     return dict((key, getattr(config, key))
-                    for key in dir(config)
-                        if key.isupper() and not key.startswith('_'))
+                for key in dir(config)
+                if key.isupper() and not key.startswith('_'))
 
 test_config = _get_test_config()
 
@@ -195,7 +195,7 @@ class test_App(Case):
 
         app = Celery(set_as_current=False)
         app.conf.CELERY_ANNOTATIONS = {
-                adX.name: {'@__call__': deco}
+            adX.name: {'@__call__': deco}
         }
         adX.bind(app)
         self.assertIs(adX.app, app)
@@ -380,16 +380,19 @@ class test_App(Case):
         self.assertTrue(self.app.mail_admins('Subject', 'Body'))
 
     def test_amqp_get_broker_info(self):
-        self.assertDictContainsSubset({'hostname': 'localhost',
-                                       'userid': 'guest',
-                                       'password': 'guest',
-                                       'virtual_host': '/'},
-                            self.app.connection('amqp://').info())
+        self.assertDictContainsSubset(
+            {'hostname': 'localhost',
+             'userid': 'guest',
+             'password': 'guest',
+             'virtual_host': '/'},
+            self.app.connection('amqp://').info(),
+        )
         self.app.conf.BROKER_PORT = 1978
         self.app.conf.BROKER_VHOST = 'foo'
-        self.assertDictContainsSubset({'port': 1978,
-                                       'virtual_host': 'foo'},
-                    self.app.connection('amqp://:1978/foo').info())
+        self.assertDictContainsSubset(
+            {'port': 1978, 'virtual_host': 'foo'},
+            self.app.connection('amqp://:1978/foo').info(),
+        )
         conn = self.app.connection('amqp:////value')
         self.assertDictContainsSubset({'virtual_host': '/value'},
                                       conn.info())
@@ -442,8 +445,9 @@ class test_App(Case):
             chan.close()
         assert conn.transport_cls == 'memory'
 
-        prod = self.app.amqp.TaskProducer(conn,
-                exchange=Exchange('foo_exchange'))
+        prod = self.app.amqp.TaskProducer(
+            conn, exchange=Exchange('foo_exchange'),
+        )
 
         dispatcher = Dispatcher()
         self.assertTrue(prod.publish_task('footask', (), {},

+ 1 - 1
celery/tests/app/test_beat.py

@@ -266,7 +266,7 @@ class test_Scheduler(Case):
         nums = [600, 300, 650, 120, 250, 36]
         s = dict(('test_ticks%s' % i,
                  {'schedule': mocked_schedule(False, j)})
-                    for i, j in enumerate(nums))
+                 for i, j in enumerate(nums))
         scheduler.update_from_dict(s)
         self.assertEqual(scheduler.tick(), min(nums))
 

+ 3 - 2
celery/tests/app/test_builtins.py

@@ -65,8 +65,9 @@ class test_chunks(Case):
         def chunks_mul(l):
             return l
 
-        app.tasks['celery.chunks'](chunks_mul,
-                [(2, 2), (4, 4), (8, 8)], 1)
+        app.tasks['celery.chunks'](
+            chunks_mul, [(2, 2), (4, 4), (8, 8)], 1,
+        )
         self.assertTrue(apply_chunks.called)
 
 

+ 1 - 1
celery/tests/app/test_celery.py

@@ -14,5 +14,5 @@ class test_celery_package(Case):
 
     def test_meta(self):
         for m in ('__author__', '__contact__', '__homepage__',
-                '__docformat__'):
+                  '__docformat__'):
             self.assertTrue(getattr(celery, m, None))

+ 4 - 3
celery/tests/app/test_control.py

@@ -132,8 +132,9 @@ class test_Broadcast(Case):
 
     @with_mock_broadcast
     def test_broadcast_limit(self):
-        self.control.broadcast('foobarbaz1', arguments=[], limit=None,
-                destination=[1, 2, 3])
+        self.control.broadcast(
+            'foobarbaz1', arguments=[], limit=None, destination=[1, 2, 3],
+        )
         self.assertIn('foobarbaz1', MockMailbox.sent)
 
     @with_mock_broadcast
@@ -191,6 +192,6 @@ class test_Broadcast(Case):
     def test_revoke_from_resultset(self):
         r = self.app.GroupResult(uuid(),
                                  map(self.app.AsyncResult,
-                                        [uuid() for i in range(10)]))
+                                     [uuid() for i in range(10)]))
        r.revoke()
        self.assertIn('revoke', MockMailbox.sent)

+ 16 - 13
celery/tests/app/test_loaders.py

@@ -9,9 +9,9 @@ from mock import Mock, patch
 from celery import loaders
 from celery.app import app_or_default
 from celery.exceptions import (
-        NotConfigured,
-        ImproperlyConfigured,
-        CPendingDeprecationWarning,
+    NotConfigured,
+    ImproperlyConfigured,
+    CPendingDeprecationWarning,
 )
 from celery.loaders import base
 from celery.loaders import default
@@ -49,15 +49,17 @@ class test_loaders(AppCase):
     def test_get_loader_cls(self):
 
         self.assertEqual(loaders.get_loader_cls('default'),
-                          default.Loader)
+                         default.Loader)
 
     def test_current_loader(self):
-        with self.assertWarnsRegex(CPendingDeprecationWarning,
+        with self.assertWarnsRegex(
+                CPendingDeprecationWarning,
                 r'deprecation'):
             self.assertIs(loaders.current_loader(), self.app.loader)
 
     def test_load_settings(self):
-        with self.assertWarnsRegex(CPendingDeprecationWarning,
+        with self.assertWarnsRegex(
+                CPendingDeprecationWarning,
                 r'deprecation'):
             self.assertIs(loaders.load_settings(), self.app.conf)
 
@@ -102,12 +104,13 @@ class test_LoaderBase(Case):
 
     def test_import_default_modules(self):
         modnames = lambda l: [m.__name__ for m in l]
-        prev, self.app.conf.CELERY_IMPORTS = \
-                self.app.conf.CELERY_IMPORTS, ('os', 'sys')
+        prev, self.app.conf.CELERY_IMPORTS = (
+            self.app.conf.CELERY_IMPORTS, ('os', 'sys'))
         try:
-            self.assertEqual(sorted(modnames(
-                                self.loader.import_default_modules())),
-                            sorted(modnames([os, sys])))
+            self.assertEqual(
+                sorted(modnames(self.loader.import_default_modules())),
+                sorted(modnames([os, sys])),
+            )
         finally:
             self.app.conf.CELERY_IMPORTS = prev
 
@@ -263,8 +266,8 @@ class test_AppLoader(Case):
         self.assertEqual(self.loader.conf['BAR'], 20)
 
     def test_on_worker_init(self):
-        prev, self.app.conf.CELERY_IMPORTS = \
-                self.app.conf.CELERY_IMPORTS, ('subprocess', )
+        prev, self.app.conf.CELERY_IMPORTS = (
+            self.app.conf.CELERY_IMPORTS, ('subprocess', ))
         try:
             sys.modules.pop('subprocess', None)
             self.loader.init_worker()
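
The `assertWarnsRegex` hunks above indent the wrapped `with` header by two levels rather than one, so the continuation does not blend into the suite underneath; the extra level is the usual workaround pycodestyle expects for wrapped compound-statement headers. A runnable sketch using only the stdlib:

    import warnings

    with warnings.catch_warnings(
            record=True) as caught:
        warnings.simplefilter('always')
        warnings.warn('deprecation')
    print(len(caught))  # 1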

+ 7 - 5
celery/tests/app/test_log.py

@@ -165,8 +165,10 @@ class test_default_logger(AppCase):
         Logging._setup = False
         logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
                                    root=False, colorize=None)
-        self.assertIs(get_handlers(logger)[0].stream, sys.__stderr__,
-                'setup_logger logs to stderr without logfile argument.')
+        self.assertIs(
+            get_handlers(logger)[0].stream, sys.__stderr__,
+            'setup_logger logs to stderr without logfile argument.',
+        )
 
     def test_setup_logger_no_handlers_stream(self):
         l = self.get_logger()
@@ -175,7 +177,7 @@ class test_default_logger(AppCase):
         with override_stdouts() as outs:
             stdout, stderr = outs
             l = self.setup_logger(logfile=sys.stderr, loglevel=logging.INFO,
-                                root=False)
+                                  root=False)
             l.info('The quick brown fox...')
             self.assertIn('The quick brown fox...', stderr.getvalue())
 
@@ -196,7 +198,7 @@ class test_default_logger(AppCase):
                 logger.error('foo')
                 self.assertIn('foo', sio.getvalue())
                 log.redirect_stdouts_to_logger(logger, stdout=False,
-                        stderr=False)
+                                               stderr=False)
         finally:
             sys.stdout, sys.stderr = sys.__stdout__, sys.__stderr__
 
@@ -222,7 +224,7 @@ class test_default_logger(AppCase):
 
     def test_logging_proxy_recurse_protection(self):
         logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
-                root=False)
+                                   root=False)
         p = LoggingProxy(logger, loglevel=logging.ERROR)
         p._thread.recurse_protection = True
         try:

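Every test_log hunk is the same E128 repair: once a call keeps its first argument on the opening line, pep8 wants each continuation aligned one column past the opening parenthesis, directly under that first argument. A runnable sketch using the standard logging module (basicConfig stands in for the setup_logger helper used in these tests):

    import logging
    import sys

    # Visual indent: `level=` sits exactly under `stream=`, i.e. one
    # column past the opening parenthesis of basicConfig(...).
    logging.basicConfig(stream=sys.stderr,
                        level=logging.ERROR)
    logging.getLogger(__name__).error('continuation aligned, no E128')
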
+ 11 - 8
celery/tests/app/test_routes.py

@@ -79,13 +79,15 @@ class test_MapRoute(RouteCase):
     def test_route_for_task(self):
         expand = E(current_app.amqp.queues)
         route = routes.MapRoute({mytask.name: b_queue})
-        self.assertDictContainsSubset(b_queue,
-                             expand(route.route_for_task(mytask.name)))
+        self.assertDictContainsSubset(
+            b_queue,
+            expand(route.route_for_task(mytask.name)),
+        )
         self.assertIsNone(route.route_for_task('celery.awesome'))

     def test_expand_route_not_found(self):
         expand = E(current_app.amqp.Queues(
-                    current_app.conf.CELERY_QUEUES, False))
+            current_app.conf.CELERY_QUEUES, False))
         route = routes.MapRoute({'a': {'queue': 'x'}})
         with self.assertRaises(QueueNotFound):
             expand(route.route_for_task('a'))
@@ -135,9 +137,11 @@ class test_lookup_route(RouteCase):
                             {mytask.name: {'queue': 'foo'}}))
         router = Router(R, current_app.amqp.queues)
         self.assertEqual(router.route({}, mytask.name,
-                          args=[1, 2], kwargs={})['queue'].name, 'foo')
-        self.assertEqual(router.route({}, 'celery.poza')['queue'].name,
-                current_app.conf.CELERY_DEFAULT_QUEUE)
+                         args=[1, 2], kwargs={})['queue'].name, 'foo')
+        self.assertEqual(
+            router.route({}, 'celery.poza')['queue'].name,
+            current_app.conf.CELERY_DEFAULT_QUEUE,
+        )


 class test_prepare(Case):
@@ -146,8 +150,7 @@ class test_prepare(Case):
         from celery.datastructures import LRUCache
         o = object()
         R = [{'foo': 'bar'},
-                  'celery.datastructures.LRUCache',
-                  o]
+             'celery.datastructures.LRUCache', o]
         p = routes.prepare(R)
         self.assertIsInstance(p[0], routes.MapRoute)
         self.assertIsInstance(maybe_promise(p[1]), LRUCache)

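Where realigning under the parenthesis would push code past the right margin, the commit switches to the other pep8-clean layout instead: no argument on the opening line, a flat four-space hanging indent, and the closing bracket dedented, often with a trailing comma. A small sketch of that shape (the helper function is hypothetical, not from the diff):

    def route_name(task, default='celery'):
        # Hypothetical stand-in for Router.route() in the tests above.
        return {'mytask': 'foo'}.get(task, default)

    # Hanging indent: arguments on their own lines, four spaces in,
    # closing parenthesis back at the statement's indentation.
    assert route_name(
        'celery.poza',
        default='celery',
    ) == 'celery'
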
+ 6 - 6
celery/tests/backends/test_amqp.py

@@ -178,7 +178,7 @@ class test_AMQPBackend(AppCase):
         r1 = backend.get_task_meta(uuid())
         self.assertDictContainsSubset({'status': states.FAILURE,
                                        'seq': 3}, r1,
-                                       'FFWDs to the last state')
+                                      'FFWDs to the last state')

         # Caches last known state.
         results.put(Message())
@@ -242,11 +242,11 @@ class test_AMQPBackend(AppCase):

         res = list(b.get_many(tids, timeout=1))
         expected_results = [(tid, {'status': states.SUCCESS,
-                                    'result': i,
-                                    'traceback': None,
-                                    'task_id': tid,
-                                    'children': None})
-                                for i, tid in enumerate(tids)]
+                                   'result': i,
+                                   'traceback': None,
+                                   'task_id': tid,
+                                   'children': None})
+                            for i, tid in enumerate(tids)]
         self.assertEqual(sorted(res), sorted(expected_results))
         self.assertDictEqual(b._cache[res[0][0]], res[0][1])
         cached_res = list(b.get_many(tids, timeout=1))

+ 2 - 2
celery/tests/backends/test_base.py

@@ -12,7 +12,7 @@ from celery.result import AsyncResult, GroupResult
 from celery.utils import serialization
 from celery.utils.serialization import subclass_exception
 from celery.utils.serialization import \
-        find_nearest_pickleable_exception as fnpe
+    find_nearest_pickleable_exception as fnpe
 from celery.utils.serialization import UnpickleableExceptionWrapper
 from celery.utils.serialization import get_pickleable_exception as gpe

@@ -44,7 +44,7 @@ class test_serialization(Case):
     def test_create_exception_cls(self):
         self.assertTrue(serialization.create_exception_cls('FooError', 'm'))
         self.assertTrue(serialization.create_exception_cls('FooError', 'm',
-                                                            KeyError))
+                                                           KeyError))


 class test_BaseBackend_interface(Case):

+ 2 - 2
celery/tests/backends/test_cache.py

@@ -122,8 +122,8 @@ class MemcachedClient(DummyClient):
     def set(self, key, value, *args, **kwargs):
         if isinstance(key, unicode):
             raise MyMemcachedStringEncodingError(
-                    'Keys must be str, not unicode.  Convert your unicode '
-                    'strings using mystring.encode(charset)!')
+                'Keys must be str, not unicode.  Convert your unicode '
+                'strings using mystring.encode(charset)!')
         return super(MemcachedClient, self).set(key, value, *args, **kwargs)


+ 9 - 8
celery/tests/backends/test_cassandra.py

@@ -79,8 +79,7 @@ class test_CassandraBackend(AppCase):
             # no servers raises ImproperlyConfigured
             with self.assertRaises(ImproperlyConfigured):
                 app.conf.CASSANDRA_SERVERS = None
-                mod.CassandraBackend(app=app, keyspace='b',
-                        column_family='c')
+                mod.CassandraBackend(app=app, keyspace='b', column_family='c')

     def test_reduce(self):
         with mock_module('pycassa'):
@@ -99,12 +98,14 @@ class test_CassandraBackend(AppCase):
             Get_Column = x._get_column_family = Mock()
             get_column = Get_Column.return_value = Mock()
             get = get_column.get
-            META = get.return_value = {'task_id': 'task_id',
-                                'status': states.SUCCESS,
-                                'result': '1',
-                                'date_done': 'date',
-                                'traceback': '',
-                                'children': None}
+            META = get.return_value = {
+                'task_id': 'task_id',
+                'status': states.SUCCESS,
+                'result': '1',
+                'date_done': 'date',
+                'traceback': '',
+                'children': None,
+            }
             x.decode = Mock()
             x.detailed_mode = False
             meta = x._get_task_meta_for('task_id')

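The Cassandra hunk also rewraps a long dict literal: one key per line at a four-space hanging indent, closing brace dedented, so any later change to a single field touches exactly one line. A self-contained sketch of that layout (plain strings stand in for the celery.states constants used in the real test):

    META = {
        'task_id': 'task_id',
        'status': 'SUCCESS',   # states.SUCCESS in the real test
        'result': '1',
        'date_done': 'date',
        'traceback': '',
        'children': None,
    }
    assert META['status'] == 'SUCCESS'
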
+ 3 - 2
celery/tests/bin/test_base.py

@@ -88,8 +88,9 @@ class test_Command(AppCase):
         try:
             cmd = MockCommand()
             cmd.setup_app_from_commandline(['--broker=xyzza://'])
-            self.assertEqual(os.environ.get('CELERY_BROKER_URL'),
-                    'xyzza://')
+            self.assertEqual(
+                os.environ.get('CELERY_BROKER_URL'), 'xyzza://',
+            )
         finally:
             if prev:
                 os.environ['CELERY_BROKER_URL'] = prev

+ 8 - 4
celery/tests/bin/test_camqadm.py

@@ -70,10 +70,14 @@ class test_AMQShell(AppCase):
         self.shell.onecmd('queue.delete foo')

     def test_completenames(self):
-        self.assertEqual(self.shell.completenames('queue.dec'),
-                ['queue.declare'])
-        self.assertEqual(self.shell.completenames('declare'),
-                ['queue.declare', 'exchange.declare'])
+        self.assertEqual(
+            self.shell.completenames('queue.dec'),
+            ['queue.declare'],
+        )
+        self.assertEqual(
+            self.shell.completenames('declare'),
+            ['queue.declare', 'exchange.declare'],
+        )

     def test_empty_line(self):
         self.shell.emptyline = Mock()

+ 9 - 5
celery/tests/bin/test_celery.py

@@ -50,7 +50,7 @@ class test_Command(AppCase):
         self.cmd.run_from_argv = Mock()
         self.assertEqual(self.cmd.show_help('foo'), EX_USAGE)
         self.cmd.run_from_argv.assert_called_with(
-                self.cmd.prog_name, ['foo', '--help']
+            self.cmd.prog_name, ['foo', '--help']
         )

     def test_error(self):
@@ -82,10 +82,14 @@ class test_Command(AppCase):
         self.assertIn('bar', self.cmd.prettify(['foo', 'bar'])[1])

     def test_prettify_dict(self):
-        self.assertIn('OK',
-            str(self.cmd.prettify({'ok': 'the quick brown fox'})[0]))
-        self.assertIn('ERROR',
-            str(self.cmd.prettify({'error': 'the quick brown fox'})[0]))
+        self.assertIn(
+            'OK',
+            str(self.cmd.prettify({'ok': 'the quick brown fox'})[0]),
+        )
+        self.assertIn(
+            'ERROR',
+            str(self.cmd.prettify({'error': 'the quick brown fox'})[0]),
+        )

     def test_prettify(self):
         self.assertIn('OK', str(self.cmd.prettify('the quick brown')))

+ 2 - 2
celery/tests/bin/test_celerybeat.py

@@ -156,8 +156,8 @@ class test_div(AppCase):

     def setup(self):
         self.prev, beatapp.Beat = beatapp.Beat, MockBeat
-        self.ctx, celerybeat_bin.detached = \
-                celerybeat_bin.detached, MockDaemonContext
+        self.ctx, celerybeat_bin.detached = (
+            celerybeat_bin.detached, MockDaemonContext)

     def teardown(self):
         beatapp.Beat = self.prev

+ 13 - 11
celery/tests/bin/test_celeryd.py

@@ -214,8 +214,7 @@ class test_Worker(WorkerAppCase):

         # test when there are too few output lines
         # to draft the ascii art onto
-        prev, cd.ARTLINES = (cd.ARTLINES,
-            ['the quick brown fox'])
+        prev, cd.ARTLINES = cd.ARTLINES, ['the quick brown fox']
         self.assertTrue(worker.startup_info())

     @disable_stdouts
@@ -242,10 +241,10 @@ class test_Worker(WorkerAppCase):
         app = current_app
         c = app.conf
         p, app.amqp.queues = app.amqp.queues, app.amqp.Queues({
-                'celery': {'exchange': 'celery',
-                           'routing_key': 'celery'},
-                'video': {'exchange': 'video',
-                           'routing_key': 'video'}})
+            'celery': {'exchange': 'celery',
+                       'routing_key': 'celery'},
+            'video': {'exchange': 'video',
+                      'routing_key': 'video'}})
         try:
             worker = self.Worker(queues=['video'])
             worker.init_queues()
@@ -279,8 +278,10 @@ class test_Worker(WorkerAppCase):
         worker1 = self.Worker(include='some.module')
         self.assertListEqual(worker1.include, ['some.module'])
         worker2 = self.Worker(include='some.module,another.package')
-        self.assertListEqual(worker2.include,
-                ['some.module', 'another.package'])
+        self.assertListEqual(
+            worker2.include,
+            ['some.module', 'another.package'],
+        )
         self.Worker(include=['os', 'sys'])

     @disable_stdouts
@@ -300,7 +301,8 @@ class test_Worker(WorkerAppCase):

         prev, os.getuid = os.getuid, getuid
         try:
-            with self.assertWarnsRegex(RuntimeWarning,
+            with self.assertWarnsRegex(
+                    RuntimeWarning,
                     r'superuser privileges is discouraged'):
                 worker = self.Worker()
                 worker.run()
@@ -557,7 +559,7 @@ class test_signal_handlers(WorkerAppCase):
                 c.return_value = 3
                 worker = self._Worker()
                 handlers = self.psig(
-                        cd.install_worker_term_hard_handler, worker)
+                    cd.install_worker_term_hard_handler, worker)
                 try:
                     handlers['SIGQUIT']('SIGQUIT', object())
                     self.assertTrue(state.should_terminate)
@@ -567,7 +569,7 @@ class test_signal_handlers(WorkerAppCase):
                 c.return_value = 1
                 worker = self._Worker()
                 handlers = self.psig(
-                        cd.install_worker_term_hard_handler, worker)
+                    cd.install_worker_term_hard_handler, worker)
                 with self.assertRaises(SystemTerminate):
                     handlers['SIGQUIT']('SIGQUIT', object())
         finally:

+ 5 - 4
celery/tests/bin/test_celeryd_detach.py

@@ -26,14 +26,14 @@ if not current_app.IS_WINDOWS:
             context.__exit__ = Mock()

             detach('/bin/boo', ['a', 'b', 'c'], logfile='/var/log',
-                    pidfile='/var/pid')
+                   pidfile='/var/pid')
             detached.assert_called_with('/var/log', '/var/pid', None, None, 0,
                                         None, False)
             execv.assert_called_with('/bin/boo', ['/bin/boo', 'a', 'b', 'c'])

             execv.side_effect = Exception('foo')
-            r = detach('/bin/boo', ['a', 'b', 'c'], logfile='/var/log',
-                    pidfile='/var/pid')
+            r = detach('/bin/boo', ['a', 'b', 'c'],
+                       logfile='/var/log', pidfile='/var/pid')
             context.__enter__.assert_called_with()
             self.assertTrue(logger.critical.called)
             setup_logs.assert_called_with('ERROR', '/var/log')
@@ -85,7 +85,8 @@ class test_Command(Case):
         x = detached_celeryd()
         x.execute_from_commandline(self.argv)
         self.assertTrue(exit.called)
-        detach.assert_called_with(path=x.execv_path, uid=None, gid=None,
+        detach.assert_called_with(
+            path=x.execv_path, uid=None, gid=None,
             umask=0, fake=False, logfile='/var/log', pidfile='celeryd.pid',
             argv=['-m', 'celery.bin.celeryd', '-c', '1', '-lDEBUG',
                   '--logfile=/var/log', '--pidfile=celeryd.pid',

+ 57 - 37
celery/tests/bin/test_celeryd_multi.py

@@ -44,8 +44,10 @@ class test_functions(Case):
     def test_parse_ns_range(self):
         self.assertEqual(parse_ns_range('1-3', True), ['1', '2', '3'])
         self.assertEqual(parse_ns_range('1-3', False), ['1-3'])
-        self.assertEqual(parse_ns_range('1-3,10,11,20', True),
-                ['1', '2', '3', '10', '11', '20'])
+        self.assertEqual(parse_ns_range(
+            '1-3,10,11,20', True),
+            ['1', '2', '3', '10', '11', '20'],
+        )

     def test_format_opt(self):
         self.assertEqual(format_opt('--foo', None), '--foo')
@@ -79,62 +81,77 @@ class test_multi_args(Case):

     @patch('socket.gethostname')
     def test_parse(self, gethostname):
-        p = NamespacedOptionParser(['-c:jerry,elaine', '5',
-                                    '--loglevel:kramer=DEBUG',
-                                    '--flag',
-                                    '--logfile=foo', '-Q', 'bar', 'jerry',
-                                    'elaine', 'kramer',
-                                    '--', '.disable_rate_limits=1'])
+        p = NamespacedOptionParser([
+            '-c:jerry,elaine', '5',
+            '--loglevel:kramer=DEBUG',
+            '--flag',
+            '--logfile=foo', '-Q', 'bar', 'jerry',
+            'elaine', 'kramer',
+            '--', '.disable_rate_limits=1',
+        ])
         it = multi_args(p, cmd='COMMAND', append='*AP*',
-                prefix='*P*', suffix='*S*')
+                        prefix='*P*', suffix='*S*')
         names = list(it)
-        self.assertEqual(names[0][0:2], ('*P*jerry*S*',
-            [
+        self.assertEqual(
+            names[0][0:2],
+            ('*P*jerry*S*', [
                 'COMMAND', '-n *P*jerry*S*', '-Q bar',
                 '-c 5', '--flag', '--logfile=foo',
                 '-- .disable_rate_limits=1', '*AP*',
-            ]
-        ))
-        self.assertEqual(names[1][0:2], ('*P*elaine*S*',
-            [
+            ]),
+        )
+        self.assertEqual(
+            names[1][0:2],
+            ('*P*elaine*S*', [
                 'COMMAND', '-n *P*elaine*S*', '-Q bar',
                 '-c 5', '--flag', '--logfile=foo',
                 '-- .disable_rate_limits=1', '*AP*',
-            ]
-        ))
-        self.assertEqual(names[2][0:2], ('*P*kramer*S*',
-            [
+            ]),
+        )
+        self.assertEqual(
+            names[2][0:2],
+            ('*P*kramer*S*', [
                 'COMMAND', '--loglevel=DEBUG', '-n *P*kramer*S*',
                 '-Q bar', '--flag', '--logfile=foo',
                 '-- .disable_rate_limits=1', '*AP*',
-            ]
-        ))
+            ]),
+        )
         expand = names[0][2]
         self.assertEqual(expand('%h'), '*P*jerry*S*')
         self.assertEqual(expand('%n'), 'jerry')
         names2 = list(multi_args(p, cmd='COMMAND', append='',
-                prefix='*P*', suffix='*S*'))
+                      prefix='*P*', suffix='*S*'))
         self.assertEqual(names2[0][1][-1], '-- .disable_rate_limits=1')

         gethostname.return_value = 'example.com'
         p2 = NamespacedOptionParser(['10', '-c:1', '5'])
         names3 = list(multi_args(p2, cmd='COMMAND'))
         self.assertEqual(len(names3), 10)
-        self.assertEqual(names3[0][0:2], ('celery1.example.com',
-            ['COMMAND', '-n celery1.example.com', '-c 5', '']))
+        self.assertEqual(
+            names3[0][0:2],
+            ('celery1.example.com',
+             ['COMMAND', '-n celery1.example.com', '-c 5', '']),
+        )
         for i, worker in enumerate(names3[1:]):
-            self.assertEqual(worker[0:2], ('celery%s.example.com' % (i + 2),
-                ['COMMAND', '-n celery%s.example.com' % (i + 2), '']))
+            self.assertEqual(
+                worker[0:2],
+                ('celery%s.example.com' % (i + 2),
+                 ['COMMAND', '-n celery%s.example.com' % (i + 2), '']),
+            )

         names4 = list(multi_args(p2, cmd='COMMAND', suffix='""'))
         self.assertEqual(len(names4), 10)
-        self.assertEqual(names4[0][0:2], ('celery1',
-            ['COMMAND', '-n celery1', '-c 5', '']))
+        self.assertEqual(
+            names4[0][0:2],
+            ('celery1', ['COMMAND', '-n celery1', '-c 5', '']),
+        )

         p3 = NamespacedOptionParser(['foo', '-c:foo', '5'])
         names5 = list(multi_args(p3, cmd='COMMAND', suffix='""'))
-        self.assertEqual(names5[0][0:2], ('foo',
-            ['COMMAND', '-n foo', '-c 5', '']))
+        self.assertEqual(
+            names5[0][0:2],
+            ('foo', ['COMMAND', '-n foo', '-c 5', '']),
+        )


 class test_MultiTool(Case):
@@ -188,7 +205,7 @@ class test_MultiTool(Case):
         pipe.wait.return_value = 2
         self.assertEqual(self.t.waitexec(['-m', 'foo'], 'path'), 2)
         self.t.note.assert_called_with(
-                '* Child terminated with failure code 2')
+            '* Child terminated with failure code 2')

         pipe.wait.return_value = 0
         self.assertFalse(self.t.waitexec(['-m', 'foo', 'path']))
@@ -213,8 +230,9 @@ class test_MultiTool(Case):

     def test_expand(self):
         self.t.expand(['foo%n', 'ask', 'klask', 'dask'])
-        self.assertEqual(self.fh.getvalue(),
-                'fooask\nfooklask\nfoodask\n')
+        self.assertEqual(
+            self.fh.getvalue(), 'fooask\nfooklask\nfoodask\n',
+        )

     def test_restart(self):
         stop = self.t._stop_nodes = Mock()
@@ -287,10 +305,10 @@ class test_MultiTool(Case):
         nodes = self.t.getpids(p, 'celeryd', callback=callback)
         self.assertEqual(nodes, [
             ('foo.e.com',
-              ('celeryd', '--pidfile=celeryd@foo.pid', '-n foo.e.com', ''),
+             ('celeryd', '--pidfile=celeryd@foo.pid', '-n foo.e.com', ''),
              10),
             ('bar.e.com',
-              ('celeryd', '--pidfile=celeryd@bar.pid', '-n bar.e.com', ''),
+             ('celeryd', '--pidfile=celeryd@bar.pid', '-n bar.e.com', ''),
              11),
         ])
         self.assertTrue(callback.called)
@@ -411,8 +429,10 @@ class test_MultiTool(Case):
         self.t.execute_from_commandline(['multi', '-foo'])
         self.t.error.assert_called_with()

-        self.t.execute_from_commandline(['multi', 'start', 'foo',
-                '--nosplash', '--quiet', '-q', '--verbose', '--no-color'])
+        self.t.execute_from_commandline(
+            ['multi', 'start', 'foo',
+             '--nosplash', '--quiet', '-q', '--verbose', '--no-color'],
+        )
         self.assertTrue(self.t.nosplash)
         self.assertTrue(self.t.quiet)
         self.assertTrue(self.t.verbose)

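The big test_parse hunk above applies the same idea to long literal argument lists: the list moves inside the call's own brackets, one group of items per line, closing `])` dedented. A sketch with a stand-in parser (NamespacedOptionParser itself isn't reproduced here):

    def parse_argv(argv):
        # Hypothetical stand-in for NamespacedOptionParser.
        return list(argv)

    p = parse_argv([
        '-c:jerry,elaine', '5',
        '--loglevel:kramer=DEBUG',
        '--flag',
    ])
    assert p[0] == '-c:jerry,elaine'
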
+ 3 - 3
celery/tests/compat.py

@@ -11,7 +11,7 @@ class WarningMessage(object):
                         'line')

     def __init__(self, message, category, filename, lineno, file=None,
-                    line=None):
+                 line=None):
         local_values = locals()
         for attr in self._WARNING_DETAILS:
             setattr(self, attr, local_values[attr])
@@ -20,8 +20,8 @@ class WarningMessage(object):

     def __str__(self):
         return ('{message : %r, category : %r, filename : %r, lineno : %s, '
-                    'line : %r}' % (self.message, self._category_name,
-                                    self.filename, self.lineno, self.line))
+                'line : %r}' % (self.message, self._category_name,
+                                self.filename, self.lineno, self.line))


 class catch_warnings(object):

+ 2 - 2
celery/tests/concurrency/test_concurrency.py

@@ -45,8 +45,8 @@ class test_BasePool(Case):
                      callback=gen_callback('callback'),
                      accept_callback=None)
         self.assertDictEqual(scratch,
-                              {'target': (3, (8, 16)),
-                               'callback': (4, (42, ))})
+                             {'target': (3, (8, 16)),
+                              'callback': (4, (42, ))})

     def test_does_not_debug(self):
         x = BasePool(10)

+ 2 - 2
celery/tests/concurrency/test_gevent.py

@@ -60,8 +60,8 @@ class test_Schedule(Case):
     def test_sched(self):
         with mock_module(*gevent_modules):
             with patch_many('gevent.greenlet',
-                    'gevent.greenlet.GreenletExit') as (greenlet,
-                                                        GreenletExit):
+                            'gevent.greenlet.GreenletExit') as (greenlet,
+                                                                GreenletExit):
                 greenlet.Greenlet = object
                 x = Schedule()
                 greenlet.Greenlet = Mock()

+ 4 - 4
celery/tests/concurrency/test_pool.py

@@ -59,7 +59,7 @@ class test_TaskPool(Case):
         self.assertEqual(res.get(), 100)
         time.sleep(0.5)
         self.assertDictContainsSubset({'ret_value': 100},
-                                       scratchpad.get(0))
+                                      scratchpad.get(0))

         self.assertIsInstance(res2.get(), ExceptionInfo)
         self.assertTrue(scratchpad.get(1))
@@ -67,17 +67,17 @@ class test_TaskPool(Case):
         self.assertIsInstance(scratchpad[1]['ret_value'],
                               ExceptionInfo)
         self.assertEqual(scratchpad[1]['ret_value'].exception.args,
-                          ('FOO EXCEPTION', ))
+                         ('FOO EXCEPTION', ))

         self.assertEqual(res3.get(), 400)
         time.sleep(0.5)
         self.assertDictContainsSubset({'ret_value': 400},
-                                       scratchpad.get(2))
+                                      scratchpad.get(2))

         res3 = p.apply_async(do_something, args=[30], callback=mycallback)

         self.assertEqual(res3.get(), 900)
         time.sleep(0.5)
         self.assertDictContainsSubset({'ret_value': 900},
-                                       scratchpad.get(3))
+                                      scratchpad.get(3))
         p.stop()

+ 6 - 3
celery/tests/concurrency/test_threads.py

@@ -53,8 +53,11 @@ class test_TaskPool(Case):
             accept_callback = Mock()
             target = Mock()
             req = x.on_apply(target, args=(1, 2), kwargs={'a': 10},
-                callback=callback, accept_callback=accept_callback)
-            x.WorkRequest.assert_called_with(apply_target, (
-                target, (1, 2), {'a': 10}, callback, accept_callback))
+                             callback=callback,
+                             accept_callback=accept_callback)
+            x.WorkRequest.assert_called_with(
+                apply_target,
+                (target, (1, 2), {'a': 10}, callback, accept_callback),
+            )
             x._pool.putRequest.assert_called_with(req)
             x._pool._results_queue.queue.clear.assert_called_with()

+ 2 - 2
celery/tests/config.py

@@ -44,8 +44,8 @@ CELERY_MONGODB_BACKEND_SETTINGS = {
     'host': os.environ.get('MONGO_HOST') or 'localhost',
     'port': os.environ.get('MONGO_PORT') or 27017,
     'database': os.environ.get('MONGO_DB') or 'celery_unittests',
-    'taskmeta_collection': os.environ.get('MONGO_TASKMETA_COLLECTION') or
-        'taskmeta_collection',
+    'taskmeta_collection': (os.environ.get('MONGO_TASKMETA_COLLECTION')
+                            or 'taskmeta_collection'),
 }
 if os.environ.get('MONGO_USER'):
     CELERY_MONGODB_BACKEND_SETTINGS['user'] = os.environ.get('MONGO_USER')

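The config hunk handles a continuation that dangles off a trailing `or`: wrapping the whole expression in parentheses both satisfies the checker and lets the operator lead the line it governs, which reads better. A runnable sketch of the same shape:

    import os

    # Parenthesized boolean expression: the continuation aligns inside
    # the brackets and `or` starts the second line.
    collection = (os.environ.get('MONGO_TASKMETA_COLLECTION')
                  or 'taskmeta_collection')
    assert collection  # falls back to the default when the variable is unset
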
+ 17 - 7
celery/tests/contrib/test_migrate.py

@@ -15,13 +15,23 @@ from celery.tests.utils import AppCase, Case, Mock


 def Message(body, exchange='exchange', routing_key='rkey',
-        compression=None, content_type='application/json',
-        content_encoding='utf-8'):
-    return Mock(attrs=dict(body=body,
-        delivery_info=dict(exchange=exchange, routing_key=routing_key),
-        headers=dict(compression=compression),
-        content_type=content_type, content_encoding=content_encoding,
-        properties={}))
+            compression=None, content_type='application/json',
+            content_encoding='utf-8'):
+    return Mock(
+        attrs={
+            'body': body,
+            'delivery_info': {
+                'exchange': exchange,
+                'routing_key': routing_key,
+            },
+            'headers': {
+                'compression': compression,
+            },
+            'content_type': content_type,
+            'content_encoding': content_encoding,
+            'properties': {}
+        },
+    )


 class test_State(Case):

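The Message hunk goes a step further than re-indenting: the nested dict(...) calls become dict literals, which nest cleanly at a hanging indent and don't restrict keys to valid keyword names. A condensed, runnable sketch of the same transformation (plain values, no Mock):

    message = {
        'body': '{}',
        'delivery_info': {
            'exchange': 'exchange',
            'routing_key': 'rkey',
        },
        'headers': {
            'compression': None,
        },
        'properties': {},
    }
    assert message['delivery_info']['routing_key'] == 'rkey'
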
+ 2 - 2
celery/tests/events/test_events.py

@@ -97,11 +97,11 @@ class test_EventDispatcher(AppCase):
                                                     enabled=True)
             dispatcher2 = self.app.events.Dispatcher(connection,
                                                      enabled=True,
-                                                      channel=channel)
+                                                     channel=channel)
             self.assertTrue(dispatcher.enabled)
             self.assertTrue(dispatcher.publisher.channel)
             self.assertEqual(dispatcher.publisher.serializer,
-                            self.app.conf.CELERY_EVENT_SERIALIZER)
+                             self.app.conf.CELERY_EVENT_SERIALIZER)

             created_channel = dispatcher.publisher.channel
             dispatcher.disable()
             dispatcher.disable()

+ 10 - 10
celery/tests/events/test_state.py

@@ -54,7 +54,7 @@ class ev_worker_heartbeats(replay):
     def setup(self):
     def setup(self):
         self.events = [
         self.events = [
             Event('worker-heartbeat', hostname='utest1',
             Event('worker-heartbeat', hostname='utest1',
-                timestamp=time() - HEARTBEAT_EXPIRE_WINDOW * 2),
+                  timestamp=time() - HEARTBEAT_EXPIRE_WINDOW * 2),
             Event('worker-heartbeat', hostname='utest1'),
             Event('worker-heartbeat', hostname='utest1'),
         ]
         ]
 
 
@@ -65,16 +65,16 @@ class ev_task_states(replay):
         tid = self.tid = uuid()
         tid = self.tid = uuid()
         self.events = [
         self.events = [
             Event('task-received', uuid=tid, name='task1',
             Event('task-received', uuid=tid, name='task1',
-                args='(2, 2)', kwargs="{'foo': 'bar'}",
-                retries=0, eta=None, hostname='utest1'),
+                  args='(2, 2)', kwargs="{'foo': 'bar'}",
+                  retries=0, eta=None, hostname='utest1'),
             Event('task-started', uuid=tid, hostname='utest1'),
             Event('task-started', uuid=tid, hostname='utest1'),
             Event('task-revoked', uuid=tid, hostname='utest1'),
             Event('task-revoked', uuid=tid, hostname='utest1'),
             Event('task-retried', uuid=tid, exception="KeyError('bar')",
             Event('task-retried', uuid=tid, exception="KeyError('bar')",
-                traceback='line 2 at main', hostname='utest1'),
+                  traceback='line 2 at main', hostname='utest1'),
             Event('task-failed', uuid=tid, exception="KeyError('foo')",
             Event('task-failed', uuid=tid, exception="KeyError('foo')",
-                traceback='line 1 at main', hostname='utest1'),
+                  traceback='line 1 at main', hostname='utest1'),
             Event('task-succeeded', uuid=tid, result='4',
             Event('task-succeeded', uuid=tid, result='4',
-                runtime=0.1234, hostname='utest1'),
+                  runtime=0.1234, hostname='utest1'),
         ]
         ]
 
 
 
 
@@ -90,7 +90,7 @@ class ev_snapshot(replay):
             worker = not i % 2 and 'utest2' or 'utest1'
             worker = not i % 2 and 'utest2' or 'utest1'
             type = not i % 2 and 'task2' or 'task1'
             type = not i % 2 and 'task2' or 'task1'
             self.events.append(Event('task-received', name=type,
             self.events.append(Event('task-received', name=type,
-                          uuid=uuid(), hostname=worker))
+                               uuid=uuid(), hostname=worker))
 
 
 
 
 class test_Worker(Case):
 class test_Worker(Case):
@@ -123,10 +123,10 @@ class test_Task(Case):
                     routing_key='celery',
                     routing_key='celery',
                     succeeded=time())
                     succeeded=time())
         self.assertEqual(sorted(list(task._info_fields)),
         self.assertEqual(sorted(list(task._info_fields)),
-                              sorted(task.info().keys()))
+                         sorted(task.info().keys()))
 
 
         self.assertEqual(sorted(list(task._info_fields + ('received', ))),
         self.assertEqual(sorted(list(task._info_fields + ('received', ))),
-                              sorted(task.info(extra=('received', ))))
+                         sorted(task.info(extra=('received', ))))
 
 
         self.assertEqual(sorted(['args', 'kwargs']),
         self.assertEqual(sorted(['args', 'kwargs']),
                          sorted(task.info(['args', 'kwargs']).keys()))
                          sorted(task.info(['args', 'kwargs']).keys()))
@@ -203,7 +203,7 @@ class test_State(Case):
         # STARTED
         # STARTED
         r.next()
         r.next()
         self.assertTrue(r.state.workers['utest1'].alive,
         self.assertTrue(r.state.workers['utest1'].alive,
-                'any task event adds worker heartbeat')
+                        'any task event adds worker heartbeat')
         self.assertEqual(task.state, states.STARTED)
         self.assertEqual(task.state, states.STARTED)
         self.assertTrue(task.started)
         self.assertTrue(task.started)
         self.assertEqual(task.timestamp, task.started)
         self.assertEqual(task.timestamp, task.started)

+ 16 - 12
celery/tests/functional/case.py

@@ -64,16 +64,20 @@ class Worker(object):
         return self.hostname in flatten_reply(r)

     def wait_until_started(self, timeout=10, interval=0.5):
-        try_while(lambda: self.is_alive(interval),
-                "Worker won't start (after %s secs.)" % timeout,
-                interval=interval, timeout=timeout)
+        try_while(
+            lambda: self.is_alive(interval),
+            "Worker won't start (after %s secs.)" % timeout,
+            interval=interval, timeout=timeout,
+        )
         say('--WORKER %s IS ONLINE--' % self.hostname)

     def ensure_shutdown(self, timeout=10, interval=0.5):
         os.kill(self.pid, signal.SIGTERM)
-        try_while(lambda: not self.is_alive(interval),
-                  "Worker won't shutdown (after %s secs.)" % timeout,
-                  timeout=10, interval=0.5)
+        try_while(
+            lambda: not self.is_alive(interval),
+            "Worker won't shutdown (after %s secs.)" % timeout,
+            timeout=10, interval=0.5,
+        )
         say('--WORKER %s IS SHUTDOWN--' % self.hostname)
         self._shutdown_called = True

@@ -96,8 +100,8 @@ class Worker(object):
         def _ensure_shutdown_once():
             if not worker._shutdown_called:
                 say('-- Found worker not stopped at shutdown: %s\n%s' % (
-                        worker.hostname,
-                        '\n'.join(stack)))
+                    worker.hostname,
+                    '\n'.join(stack)))
                 worker.ensure_shutdown()

         return worker
@@ -161,10 +165,10 @@ class WorkerCase(Case):

     def ensure_received(self, task_id, interval=0.5, timeout=10):
         return try_while(lambda: self.is_received(task_id, interval),
-                        'Task not receied within timeout',
-                        interval=0.5, timeout=10)
+                         'Task not receied within timeout',
+                         interval=0.5, timeout=10)

     def ensure_scheduled(self, task_id, interval=0.5, timeout=10):
         return try_while(lambda: self.is_scheduled(task_id, interval),
-                        'Task not scheduled within timeout',
-                        interval=0.5, timeout=10)
+                         'Task not scheduled within timeout',
+                         interval=0.5, timeout=10)

+ 1 - 1
celery/tests/security/test_security.py

@@ -41,7 +41,7 @@ class test_security(SecurityCase):
         self.assertEqual(0, len(disabled))

         disable_untrusted_serializers(
-                ['application/json', 'application/x-python-serialize'])
+            ['application/json', 'application/x-python-serialize'])
         self.assertIn('application/x-yaml', disabled)
         self.assertNotIn('application/json', disabled)
         self.assertNotIn('application/x-python-serialize', disabled)

+ 3 - 3
celery/tests/slow/test_buckets.py

@@ -40,7 +40,7 @@ class MockJob(object):

     def __repr__(self):
         return '<MockJob: task:%s id:%s args:%s kwargs:%s' % (
-                self.name, self.id, self.args, self.kwargs)
+            self.name, self.id, self.args, self.kwargs)


 class test_TokenBucketQueue(Case):
@@ -105,9 +105,9 @@ class test_rate_limit_string(Case):
         self.assertEqual(timeutils.rate(999), 999)
         self.assertEqual(timeutils.rate('1456/s'), 1456)
         self.assertEqual(timeutils.rate('100/m'),
-                          100 / 60.0)
+                         100 / 60.0)
         self.assertEqual(timeutils.rate('10/h'),
-                          10 / 60.0 / 60.0)
+                         10 / 60.0 / 60.0)

         for zero in (0, None, '0', '0/m', '0/h', '0/s'):
             self.assertEqual(timeutils.rate(zero), 0)

+ 12 - 6
celery/tests/tasks/test_canvas.py

@@ -50,14 +50,20 @@ class test_Signature(Case):
     def test_replace(self):
         x = Signature('TASK', ('A'), {})
         self.assertTupleEqual(x.replace(args=('B', )).args, ('B', ))
-        self.assertDictEqual(x.replace(kwargs={'FOO': 'BAR'}).kwargs,
-                {'FOO': 'BAR'})
-        self.assertDictEqual(x.replace(options={'task_id': '123'}).options,
-                {'task_id': '123'})
+        self.assertDictEqual(
+            x.replace(kwargs={'FOO': 'BAR'}).kwargs,
+            {'FOO': 'BAR'},
+        )
+        self.assertDictEqual(
+            x.replace(options={'task_id': '123'}).options,
+            {'task_id': '123'},
+        )

     def test_set(self):
-        self.assertDictEqual(Signature('TASK', x=1).set(task_id='2').options,
-                {'x': 1, 'task_id': '2'})
+        self.assertDictEqual(
+            Signature('TASK', x=1).set(task_id='2').options,
+            {'x': 1, 'task_id': '2'},
+        )

     def test_link(self):
         x = subtask(SIG)

+ 1 - 1
celery/tests/tasks/test_chord.py

@@ -88,7 +88,7 @@ class test_unlock_chord_task(AppCase):
             try:
                 callback = Mock()
                 unlock('group_id', callback, interval=10, max_retries=30,
-                            result=map(AsyncResult, [1, 2, 3]))
+                       result=map(AsyncResult, [1, 2, 3]))
                 self.assertFalse(callback.delay.call_count)
                 # did retry
                 unlock.retry.assert_called_with(countdown=10, max_retries=30)

+ 10 - 8
celery/tests/tasks/test_http.py

@@ -81,8 +81,10 @@ class test_MutableURL(Case):
         url = 'https://e.com:808/foo/bar#zeta?x=10&y=20'
         url = http.MutableURL(url)

-        self.assertEqual(str(url).split('?')[0],
-            'https://e.com:808/foo/bar#zeta')
+        self.assertEqual(
+            str(url).split('?')[0],
+            'https://e.com:808/foo/bar#zeta',
+        )

     def test___repr__(self):
         url = http.MutableURL('http://e.com/foo/bar')
@@ -100,41 +102,41 @@ class test_HttpDispatch(Case):
     def test_dispatch_success(self):
         with mock_urlopen(success_response(100)):
             d = http.HttpDispatch('http://example.com/mul', 'GET', {
-                                    'x': 10, 'y': 10})
+                'x': 10, 'y': 10})
             self.assertEqual(d.dispatch(), 100)

     def test_dispatch_failure(self):
         with mock_urlopen(fail_response('Invalid moon alignment')):
             d = http.HttpDispatch('http://example.com/mul', 'GET', {
-                                    'x': 10, 'y': 10})
+                'x': 10, 'y': 10})
             with self.assertRaises(http.RemoteExecuteError):
                 d.dispatch()

     def test_dispatch_empty_response(self):
         with mock_urlopen(_response('')):
             d = http.HttpDispatch('http://example.com/mul', 'GET', {
-                                    'x': 10, 'y': 10})
+                'x': 10, 'y': 10})
             with self.assertRaises(http.InvalidResponseError):
                 d.dispatch()

     def test_dispatch_non_json(self):
         with mock_urlopen(_response("{'#{:'''")):
             d = http.HttpDispatch('http://example.com/mul', 'GET', {
-                                    'x': 10, 'y': 10})
+                'x': 10, 'y': 10})
             with self.assertRaises(http.InvalidResponseError):
                 d.dispatch()

     def test_dispatch_unknown_status(self):
         with mock_urlopen(unknown_response()):
             d = http.HttpDispatch('http://example.com/mul', 'GET', {
-                                    'x': 10, 'y': 10})
+                'x': 10, 'y': 10})
             with self.assertRaises(http.UnknownStatusError):
                 d.dispatch()

     def test_dispatch_POST(self):
         with mock_urlopen(success_response(100)):
             d = http.HttpDispatch('http://example.com/mul', 'POST', {
-                                    'x': 10, 'y': 10})
+                'x': 10, 'y': 10})
             self.assertEqual(d.dispatch(), 100)


+ 3 - 4
celery/tests/tasks/test_registry.py

@@ -40,8 +40,7 @@ class test_TaskRegistry(Case):

     def test_task_registry(self):
         r = TaskRegistry()
-        self.assertIsInstance(r, dict,
-                'TaskRegistry is mapping')
+        self.assertIsInstance(r, dict, 'TaskRegistry is mapping')

         self.assertRegisterUnregisterCls(r, MockTask)
         self.assertRegisterUnregisterCls(r, MockPeriodicTask)
@@ -54,11 +53,11 @@ class test_TaskRegistry(Case):
         tasks = dict(r)
         self.assertIsInstance(tasks.get(MockTask.name), MockTask)
         self.assertIsInstance(tasks.get(MockPeriodicTask.name),
-                                   MockPeriodicTask)
+                              MockPeriodicTask)

         self.assertIsInstance(r[MockTask.name], MockTask)
         self.assertIsInstance(r[MockPeriodicTask.name],
-                                   MockPeriodicTask)
+                              MockPeriodicTask)

         r.unregister(MockTask)
         self.assertNotIn(MockTask.name, r)

Some files were not shown because too many files changed in this diff
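
For reference, fixes like these are typically verified by re-running the style checker restricted to the codes in question. Assuming the pep8 tool that was current at the time (the commit does not show the exact invocation used), a check could look like this sketch:

    import pep8

    # Restrict the run to the three continuation-line codes this commit
    # targets; total_errors should be 0 for the touched files.
    style = pep8.StyleGuide(select=['E126', 'E127', 'E128'])
    report = style.check_files(['celery/tests/config.py'])
    print('continuation-line problems:', report.total_errors)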