
Change to Google-style docstrings and small updates.

Ask Solem authored 8 years ago
commit 82e478e07c
100 changed files with 1821 additions and 2135 deletions
  1. celery/__init__.py (+5 -4)
  2. celery/__main__.py (+1 -0)
  3. celery/_state.py (+4 -8)
  4. celery/app/__init__.py (+11 -15)
  5. celery/app/amqp.py (+21 -27)
  6. celery/app/annotations.py (+5 -8)
  7. celery/app/base.py (+115 -138)
  8. celery/app/builtins.py (+2 -9)
  9. celery/app/control.py (+74 -62)
  10. celery/app/defaults.py (+1 -7)
  11. celery/app/log.py (+9 -12)
  12. celery/app/registry.py (+6 -13)
  13. celery/app/routes.py (+2 -6)
  14. celery/app/task.py (+220 -242)
  15. celery/app/trace.py (+7 -8)
  16. celery/app/utils.py (+12 -17)
  17. celery/apps/beat.py (+7 -14)
  18. celery/apps/worker.py (+6 -13)
  19. celery/backends/__init__.py (+1 -7)
  20. celery/backends/amqp.py (+2 -8)
  21. celery/backends/async.py (+1 -7)
  22. celery/backends/base.py (+10 -15)
  23. celery/backends/cache.py (+9 -14)
  24. celery/backends/cassandra.py (+64 -81)
  25. celery/backends/consul.py (+3 -9)
  26. celery/backends/couchbase.py (+5 -10)
  27. celery/backends/couchdb.py (+10 -15)
  28. celery/backends/database/__init__.py (+5 -17)
  29. celery/backends/database/models.py (+16 -17)
  30. celery/backends/database/session.py (+1 -7)
  31. celery/backends/elasticsearch.py (+4 -10)
  32. celery/backends/filesystem.py (+7 -15)
  33. celery/backends/mongodb.py (+30 -40)
  34. celery/backends/redis.py (+4 -13)
  35. celery/backends/riak.py (+5 -10)
  36. celery/backends/rpc.py (+10 -12)
  37. celery/beat.py (+29 -31)
  38. celery/bin/amqp.py (+38 -50)
  39. celery/bin/base.py (+20 -16)
  40. celery/bin/beat.py (+6 -9)
  41. celery/bin/celery.py (+96 -72)
  42. celery/bin/celeryd_detach.py (+4 -9)
  43. celery/bin/events.py (+15 -16)
  44. celery/bin/graph.py (+1 -4)
  45. celery/bin/logtool.py (+1 -4)
  46. celery/bin/multi.py (+14 -13)
  47. celery/bin/worker.py (+7 -8)
  48. celery/bootsteps.py (+9 -16)
  49. celery/canvas.py (+225 -36)
  50. celery/concurrency/__init__.py (+1 -7)
  51. celery/concurrency/asynpool.py (+14 -17)
  52. celery/concurrency/base.py (+1 -8)
  53. celery/concurrency/eventlet.py (+1 -7)
  54. celery/concurrency/gevent.py (+1 -7)
  55. celery/concurrency/prefork.py (+4 -10)
  56. celery/concurrency/solo.py (+1 -7)
  57. celery/contrib/abortable.py (+5 -10)
  58. celery/contrib/migrate.py (+32 -42)
  59. celery/contrib/rdb.py (+16 -13)
  60. celery/contrib/sphinx.py (+5 -7)
  61. celery/events/__init__.py (+54 -58)
  62. celery/events/cursesmon.py (+10 -14)
  63. celery/events/dumper.py (+8 -9)
  64. celery/events/snapshot.py (+7 -9)
  65. celery/events/state.py (+15 -20)
  66. celery/exceptions.py (+2 -8)
  67. celery/five.py (+1 -9)
  68. celery/fixups/django.py (+1 -0)
  69. celery/loaders/__init__.py (+8 -9)
  70. celery/loaders/app.py (+1 -7)
  71. celery/loaders/base.py (+2 -8)
  72. celery/loaders/default.py (+1 -7)
  73. celery/local.py (+5 -11)
  74. celery/platforms.py (+74 -88)
  75. celery/result.py (+111 -127)
  76. celery/schedules.py (+34 -35)
  77. celery/security/__init__.py (+1 -7)
  78. celery/security/certificate.py (+1 -7)
  79. celery/security/key.py (+1 -7)
  80. celery/security/serialization.py (+1 -7)
  81. celery/security/utils.py (+1 -7)
  82. celery/signals.py (+6 -9)
  83. celery/states.py (+20 -22)
  84. celery/task/__init__.py (+5 -8)
  85. celery/task/base.py (+4 -11)
  86. celery/tests/security/__init__.py (+0 -1)
  87. celery/tests/security/test_security.py (+1 -3)
  88. celery/utils/__init__.py (+3 -8)
  89. celery/utils/abstract.py (+1 -7)
  90. celery/utils/collections.py (+26 -41)
  91. celery/utils/debug.py (+4 -12)
  92. celery/utils/deprecated.py (+8 -14)
  93. celery/utils/dispatch/saferef.py (+83 -87)
  94. celery/utils/dispatch/signal.py (+37 -38)
  95. celery/utils/encoding.py (+1 -7)
  96. celery/utils/functional.py (+7 -25)
  97. celery/utils/graph.py (+10 -18)
  98. celery/utils/imports.py (+1 -8)
  99. celery/utils/iso8601.py (+3 -2)
  100. celery/utils/log.py (+21 -31)

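Every file follows the same mechanical conversion: reST ``:param:``/``:keyword:``/``:returns:`` fields become Google-style ``Arguments:``/``Returns:``/``Raises:`` sections, and trailing blank lines are trimmed from docstrings. As a before/after illustration (mirroring the first hunk below; function bodies elided):

    # Old reST field style, removed throughout this commit:
    def _find_option_with_arg(argv, short_opts=None, long_opts=None):
        """Search argv for options specifying short and longopt alternatives.

        :returns: value for option found
        :raises KeyError: if option not found.
        """

    # New Google style, added throughout this commit:
    def _find_option_with_arg(argv, short_opts=None, long_opts=None):
        """Search argv for options specifying short and longopt alternatives.

        Returns:
            str: value for option found
        Raises:
            KeyError: if option not found.
        """
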
+ 5 - 4
celery/__init__.py

@@ -81,9 +81,10 @@ if STATICA_HACK:  # pragma: no cover
 def _find_option_with_arg(argv, short_opts=None, long_opts=None):
     """Search argv for options specifying short and longopt alternatives.
 
-    :returns: value for option found
-    :raises KeyError: if option not found.
-
+    Returns:
+        str: value for option found
+    Raises:
+        KeyError: if option not found.
     """
     for i, arg in enumerate(argv):
         if arg.startswith('-'):
@@ -151,7 +152,7 @@ old_module, new_module = five.recreate_module(  # pragma: no cover
         'celery.app.task': ['Task'],
         'celery._state': ['current_app', 'current_task'],
         'celery.canvas': [
-            'chain', 'chord', 'chunks', 'group',
+            'Signature', 'chain', 'chord', 'chunks', 'group',
             'signature', 'maybe_signature', 'subtask',
             'xmap', 'xstarmap',
         ],

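Google-style sections are not understood by plain Sphinx autodoc; they are normally parsed by the ``sphinx.ext.napoleon`` extension. A minimal, hypothetical ``docs/conf.py`` fragment (this commit itself does not touch the docs configuration):

    # docs/conf.py -- illustrative sketch only, not part of this commit.
    extensions = [
        'sphinx.ext.autodoc',
        'sphinx.ext.napoleon',  # translates Google-style docstrings for autodoc
    ]
    napoleon_google_docstring = True   # parse Google style (the default)
    napoleon_numpy_docstring = False   # NumPy style is not used here
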
+ 1 - 0
celery/__main__.py

@@ -1,3 +1,4 @@
+"""Entry-point for the :program:`celery` umbrella command."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import sys

+ 4 - 8
celery/_state.py

@@ -1,13 +1,10 @@
 # -*- coding: utf-8 -*-
-"""
-    celery._state
-    ~~~~~~~~~~~~~~~
-
-    This is an internal module containing thread state
-    like the ``current_app``, and ``current_task``.
+"""Internal state.
 
-    This module shouldn't be used directly.
+This is an internal module containing thread state
+like ``current_app`` and ``current_task``.
 
+This module shouldn't be used directly.
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
@@ -111,7 +108,6 @@ def get_current_worker_task():
     This is used to differentiate between the actual task
     executed by the worker and any task that was called within
     a task (using ``task.__call__`` or ``task.apply``)
-
     """
     for task in reversed(_task_stack.stack):
         if not task.request.called_directly:

+ 11 - 15
celery/app/__init__.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.app
-    ~~~~~~~~~~
-
-    Celery Application.
-
-"""
+"""Celery Application."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
@@ -20,10 +14,12 @@ from celery._state import (
 
 from .base import Celery, AppPickler
 
-__all__ = ['Celery', 'AppPickler', 'default_app', 'app_or_default',
-           'bugreport', 'enable_trace', 'disable_trace', 'shared_task',
-           'set_default_app', 'current_app', 'current_task',
-           'push_current_task', 'pop_current_task']
+__all__ = [
+    'Celery', 'AppPickler', 'default_app', 'app_or_default',
+    'bugreport', 'enable_trace', 'disable_trace', 'shared_task',
+    'set_default_app', 'current_app', 'current_task',
+    'push_current_task', 'pop_current_task',
+]
 
 #: Proxy always returning the app set as default.
 default_app = Proxy(lambda: _state.default_app)
@@ -91,14 +87,15 @@ else:
 
 def shared_task(*args, **kwargs):
     """Create shared tasks (decorator).
-    Will return a proxy that always takes the task from the current apps
-    task registry.
 
     This can be used by library authors to create tasks that will work
     for any app environment.
 
-    Example:
+    Returns:
+        ~celery.local.Proxy: A proxy that always takes the task from the
+        current app's task registry.
 
+    Example:
         >>> from celery import Celery, shared_task
         >>> @shared_task
         ... def add(x, y):
@@ -110,7 +107,6 @@ def shared_task(*args, **kwargs):
 
         >>> app2 = Celery(broker='amqp://B.example.com')
         >>> add.app is app2
-
     """
 
     def create_shared_task(**options):

+ 21 - 27
celery/app/amqp.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.app.amqp``
-    ~~~~~~~~~~~~~~~~~~~
-
-    Sending and receiving messages using Kombu.
-
-"""
+"""Sending/Receiving Messages (Kombu integration)."""
 from __future__ import absolute_import, unicode_literals
 
 import numbers
@@ -59,15 +53,13 @@ def utf8dict(d, encoding='utf-8'):
 class Queues(dict):
     """Queue name⇒ declaration mapping.
 
-    :param queues: Initial list/tuple or dict of queues.
-    :keyword create_missing: By default any unknown queues will be
-                             added automatically, but if disabled
-                             the occurrence of unknown queues
-                             in `wanted` will raise :exc:`KeyError`.
-    :keyword ha_policy: Default HA policy for queues with none set.
-    :keyword max_priority: Default x-max-priority for queues with none set.
-
-
+    Arguments:
+        queues (Iterable): Initial list/tuple or dict of queues.
+        create_missing (bool): By default any unknown queues will be
+            added automatically, but if this flag is disabled the occurrence
+            of unknown queues in `wanted` will raise :exc:`KeyError`.
+        ha_policy (Sequence, str): Default HA policy for queues with none set.
+        max_priority (int): Default x-max-priority for queues with none set.
     """
     #: If set, this is a subset of queues to consume from.
     #: The rest of the queues are then used for routing only.
@@ -114,12 +106,14 @@ class Queues(dict):
         arguments are ignored, and options are simply taken from the queue
         instance.
 
-        :param queue: :class:`kombu.Queue` instance or name of the queue.
-        :keyword exchange: (if named) specifies exchange name.
-        :keyword routing_key: (if named) specifies binding key.
-        :keyword exchange_type: (if named) specifies type of exchange.
-        :keyword \*\*options: (if named) Additional declaration options.
-
+        Arguments:
+            queue (kombu.Queue, str): Queue to add.
+            exchange (kombu.Exchange, str):
+                if queue is str, specifies exchange name.
+            routing_key (str): if queue is str, specifies binding key.
+            exchange_type (str): if queue is str, specifies type of exchange.
+            **options (Any): Additional declaration options used when
+                queue is a str.
         """
         if not isinstance(queue, Queue):
             return self.add_compat(queue, **kwargs)
@@ -181,8 +175,8 @@ class Queues(dict):
         """Sets :attr:`consume_from` by selecting a subset of the
         currently defined queues.
 
-        :param include: Names of queues to consume from.
-                        Can be iterable or string.
+        Arguments:
+            include (Sequence[str], str): Names of queues to consume from.
         """
         if include:
             self._consume_from = {
@@ -192,9 +186,9 @@ class Queues(dict):
     def deselect(self, exclude):
         """Deselect queues so that they will not be consumed from.
 
-        :param exclude: Names of queues to avoid consuming from.
-                        Can be iterable or string.
-
+        Arguments:
+            exclude (Sequence[str], str): Names of queues to avoid
+                consuming from.
         """
         if exclude:
             exclude = maybe_list(exclude)

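A short, hedged sketch of the ``Queues`` API documented above (queue names are made up for illustration):

    from kombu import Queue
    from celery.app.amqp import Queues

    # With create_missing disabled, unknown names raise KeyError.
    queues = Queues([Queue('default'), Queue('images')], create_missing=False)
    queues.select(['images'])    # consume only from 'images'
    queues.deselect('images')    # ...and stop consuming from it again
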
+ 5 - 8
celery/app/annotations.py

@@ -1,14 +1,11 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.app.annotations``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Annotations is a nice term for monkey-patching
-    task classes in the configuration.
+"""Task Annotations.
 
-    This prepares and performs the annotations in the
-    :setting:`task_annotations` setting.
+Annotations is a nice term for monkey-patching task classes
+in the configuration.
 
+This prepares and performs the annotations in the
+:setting:`task_annotations` setting.
 """
 from __future__ import absolute_import, unicode_literals
 

+ 115 - 138
celery/app/base.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.app.base``
-    ~~~~~~~~~~~~~~~~~~~
-
-    Actual App instance implementation.
-
-"""
+"""Actual App instance implementation."""
 from __future__ import absolute_import, unicode_literals
 
 import os
@@ -113,31 +107,31 @@ class PendingConfiguration(UserDict, AttributeDictMixin):
 class Celery(object):
     """Celery application.
 
-    :param main: Name of the main module if running as `__main__`.
-        This is used as the prefix for auto-generated task names.
-
-    :keyword broker: URL of the default broker used.
-    :keyword loader: The loader class, or the name of the loader class to use.
-                     Default is :class:`celery.loaders.app.AppLoader`.
-    :keyword backend: The result store backend class, or the name of the
-                      backend class to use. Default is the value of the
-                      :setting:`result_backend` setting.
-    :keyword amqp: AMQP object or class name.
-    :keyword events: Events object or class name.
-    :keyword log: Log object or class name.
-    :keyword control: Control object or class name.
-    :keyword set_as_current:  Make this the global current app.
-    :keyword tasks: A task registry or the name of a registry class.
-    :keyword include: List of modules every worker should import.
-    :keyword fixups: List of fix-up plug-ins (see e.g.
-        :mod:`celery.fixups.django`).
-    :keyword autofinalize: If set to False a :exc:`RuntimeError`
-        will be raised if the task registry or tasks are used before
-        the app is finalized.
-    :keyword config_source: receives a class with class level attributes that
-        allows configurating Celery from a single object. All attributes
-        described in the documentation can be defined.
-
+    Arguments:
+        main (str): Name of the main module if running as `__main__`.
+            This is used as the prefix for auto-generated task names.
+
+        broker (str): URL of the default broker used.
+        loader (str, type): The loader class, or the name of the loader
+            class to use.  Default is :class:`celery.loaders.app.AppLoader`.
+        backend (str, type): The result store backend class, or the name of the
+            backend class to use. Default is the value of the
+            :setting:`result_backend` setting.
+        amqp (str, type): AMQP object or class name.
+        events (str, type): Events object or class name.
+        log (str, type): Log object or class name.
+        control (str, type): Control object or class name.
+        set_as_current (bool):  Make this the global current app.
+        tasks (str, type): A task registry or the name of a registry class.
+        include (List[str]): List of modules every worker should import.
+        fixups (List[str]): List of fix-up plug-ins (see e.g.
+            :mod:`celery.fixups.django`).
+        autofinalize (bool): If set to False a :exc:`RuntimeError`
+            will be raised if the task registry or tasks are used before
+            the app is finalized.
+        config_source (str, type): receives a class with class level attributes
+            that allows configuring Celery from a single object.
+            All attributes described in the documentation can be defined.
     """
     #: This is deprecated, use :meth:`reduce_keys` instead
     Pickler = AppPickler
@@ -304,7 +298,6 @@ class Celery(object):
         """Run :program:`celery` using `argv`.
 
         Uses :data:`sys.argv` if `argv` is not specified.
-
         """
         return instantiate(
             'celery.bin.celery:CeleryCommand', app=self
@@ -314,7 +307,6 @@ class Celery(object):
         """Run :program:`celery worker` using `argv`.
 
         Uses :data:`sys.argv` if `argv` is not specified.
-
         """
         return instantiate(
             'celery.bin.worker:worker', app=self
@@ -324,31 +316,28 @@ class Celery(object):
         """Decorator to create a task class out of any callable.
 
         Examples:
+            .. code-block:: python
 
-        .. code-block:: python
-
-            @app.task
-            def refresh_feed(url):
-                store_feed(feedparser.parse(url))
-
-        with setting extra options:
+                @app.task
+                def refresh_feed(url):
+                    store_feed(feedparser.parse(url))
 
-        .. code-block:: python
+            with extra options set:
 
-            @app.task(exchange='feeds')
-            def refresh_feed(url):
-                return store_feed(feedparser.parse(url))
+            .. code-block:: python
 
-        .. admonition:: App Binding
+                @app.task(exchange='feeds')
+                def refresh_feed(url):
+                    return store_feed(feedparser.parse(url))
 
-            For custom apps the task decorator will return a proxy
-            object, so that the act of creating the task is not performed
-            until the task is used or the task registry is accessed.
+        Note:
+            App Binding: For custom apps the task decorator will return
+            a proxy object, so that the act of creating the task is not
+            performed until the task is used or the task registry is accessed.
 
             If you are depending on binding to be deferred, then you must
             not access any attributes on the returned object until the
             application is fully set up (finalized).
-
         """
         if USING_EXECV and opts.get('lazy', True):
             # When using execv the task in the original module will point to a
@@ -473,7 +462,6 @@ class Celery(object):
         not be transferred when the worker spawns child processes, so
         it's important that the same configuration happens at import time
         when pickle restores the object on the other side.
-
         """
         if not callable(fun):
             d, fun = fun, lambda: d
@@ -486,18 +474,16 @@ class Celery(object):
         """Reads configuration from object, where object is either
         an object or the name of a module to import.
 
-        :keyword silent: If true then import errors will be ignored.
-
-        :keyword force:  Force reading configuration immediately.
-            By default the configuration will be read only when required.
-
-        .. code-block:: pycon
-
+        Example:
             >>> celery.config_from_object('myapp.celeryconfig')
 
             >>> from myapp import celeryconfig
             >>> celery.config_from_object(celeryconfig)
 
+        Arguments:
+            silent (bool): If true then import errors will be ignored.
+            force (bool): Force reading configuration immediately.
+                By default the configuration will be read only when required.
         """
         self._config_source = obj
         self.namespace = namespace or self.namespace
@@ -516,7 +502,6 @@ class Celery(object):
 
             >>> os.environ['CELERY_CONFIG_MODULE'] = 'myapp.celeryconfig'
             >>> celery.config_from_envvar('CELERY_CONFIG_MODULE')
-
         """
         module_name = os.environ.get(variable_name)
         if not module_name:
@@ -541,21 +526,20 @@ class Celery(object):
         serializer will register the ``auth`` serializer with the provided
         settings into the Kombu serializer registry.
 
-        :keyword allowed_serializers: List of serializer names, or
-            content_types that should be exempt from being disabled.
-        :keyword key: Name of private key file to use.
-            Defaults to the :setting:`security_key` setting.
-        :keyword cert: Name of certificate file to use.
-            Defaults to the :setting:`security_certificate` setting.
-        :keyword store: Directory containing certificates.
-            Defaults to the :setting:`security_cert_store` setting.
-        :keyword digest: Digest algorithm used when signing messages.
-            Default is ``sha1``.
-        :keyword serializer: Serializer used to encode messages after
-            they have been signed.  See :setting:`task_serializer` for
-            the serializers supported.
-            Default is ``json``.
-
+        Arguments:
+            allowed_serializers (Set[str]): List of serializer names, or
+                content_types that should be exempt from being disabled.
+            key (str): Name of private key file to use.
+                Defaults to the :setting:`security_key` setting.
+            cert (str): Name of certificate file to use.
+                Defaults to the :setting:`security_certificate` setting.
+            store (str): Directory containing certificates.
+                Defaults to the :setting:`security_cert_store` setting.
+            digest (str): Digest algorithm used when signing messages.
+                Default is ``sha1``.
+            serializer (str): Serializer used to encode messages after
+                they have been signed.  See :setting:`task_serializer` for
+                the serializers supported.  Default is ``json``.
         """
         from celery.security import setup_security
         return setup_security(allowed_serializers, key, cert,
@@ -586,17 +570,17 @@ class Celery(object):
         Then calling ``app.autodiscover_tasks(['foo', 'bar', 'baz'])`` will
         result in the modules ``foo.tasks``, ``bar.tasks`` and ``baz.tasks``
         being imported.
 
-        :param packages: List of packages to search.
-            This argument may also be a callable, in which case the
-            value returned is used (for lazy evaluation).
-        :keyword related_name: The name of the module to find.  Defaults
-            to "tasks", which means it look for "module.tasks" for every
-            module in ``packages``.
-        :keyword force: By default this call is lazy so that the actual
-            auto-discovery will not happen until an application imports the
-            default modules.  Forcing will cause the auto-discovery to happen
-            immediately.
-
+        Arguments:
+            packages (List[str]): List of packages to search.
+                This argument may also be a callable, in which case the
+                value returned is used (for lazy evaluation).
+            related_name (str): The name of the module to find.  Defaults
+                to "tasks", which means it look for "module.tasks" for every
+                module in ``packages``.
+            force (bool): By default this call is lazy so that the actual
+                auto-discovery will not happen until an application imports
+                the default modules.  Forcing will cause the auto-discovery
+                to happen immediately.
         """
         if force:
             return self._autodiscover_tasks(packages, related_name)
@@ -632,12 +616,11 @@ class Celery(object):
                   shadow=None, chain=None, task_type=None, **options):
         """Send task by name.
 
-        :param name: Name of task to call (e.g. `"tasks.add"`).
-        :keyword result_cls: Specify custom result class. Default is
-            using :meth:`AsyncResult`.
-
-        Otherwise supports the same arguments as :meth:`@-Task.apply_async`.
+        Supports the same arguments as :meth:`@-Task.apply_async`.
 
+        Arguments:
+            name (str): Name of task to call (e.g. `"tasks.add"`).
+            result_cls (~@AsyncResult): Specify custom result class.
         """
         parent = have_parent = None
         amqp = self.amqp
@@ -688,7 +671,6 @@ class Celery(object):
         """Establish connection used for consuming.
 
         See :meth:`connection` for supported arguments.
-
         """
         return self._connection(url or self.conf.broker_read_url, **kwargs)
 
@@ -696,7 +678,6 @@ class Celery(object):
         """Establish connection used for producing.
 
         See :meth:`connection` for supported arguments.
-
         """
         return self._connection(url or self.conf.broker_write_url, **kwargs)
 
@@ -711,26 +692,28 @@ class Celery(object):
         :meth:`connection_for_write` instead, to convey the intent
         of use for this connection.
 
-        :param url: Either the URL or the hostname of the broker to use.
-
-        :keyword hostname: URL, Hostname/IP-address of the broker.
-            If a URL is used, then the other argument below will
-            be taken from the URL instead.
-        :keyword userid: Username to authenticate as.
-        :keyword password: Password to authenticate with
-        :keyword virtual_host: Virtual host to use (domain).
-        :keyword port: Port to connect to.
-        :keyword ssl: Defaults to the :setting:`broker_use_ssl` setting.
-        :keyword transport: defaults to the :setting:`broker_transport`
-                 setting.
-        :keyword transport_options: Dictionary of transport specific options.
-        :keyword heartbeat: AMQP Heartbeat in seconds (``pyamqp`` only).
-        :keyword login_method: Custom login method to use (AMQP only).
-        :keyword failover_strategy: Custom failover strategy.
-        :keyword \*\*kwargs: Additional arguments to :class:`kombu.Connection`.
-
-        :returns: :class:`kombu.Connection`
-
+        Arguments:
+            url: Either the URL or the hostname of the broker to use.
+
+            hostname (str): URL, Hostname/IP-address of the broker.
+                If a URL is used, then the other arguments below will
+                be taken from the URL instead.
+            userid (str): Username to authenticate as.
+            password (str): Password to authenticate with.
+            virtual_host (str): Virtual host to use (domain).
+            port (int): Port to connect to.
+            ssl (bool, Dict): Defaults to the :setting:`broker_use_ssl`
+                setting.
+            transport (str): defaults to the :setting:`broker_transport`
+                setting.
+            transport_options (Dict): Dictionary of transport specific options.
+            heartbeat (int): AMQP Heartbeat in seconds (``pyamqp`` only).
+            login_method (str): Custom login method to use (AMQP only).
+            failover_strategy (str, Callable): Custom failover strategy.
+            **kwargs: Additional arguments to :class:`kombu.Connection`.
+
+        Returns:
+            kombu.Connection: the lazy connection instance.
         """
         return self.connection_for_write(
             hostname or self.conf.broker_write_url,
@@ -780,8 +763,9 @@ class Celery(object):
         """For use within a :keyword:`with` statement to get a connection
         from the pool if one is not already provided.
 
-        :keyword connection: If not provided, then a connection will be
-                             acquired from the connection pool.
+        Arguments:
+            connection (kombu.Connection): If not provided, a connection
+                will be acquired from the connection pool.
         """
         return FallbackContext(connection, self._acquire_connection, pool=pool)
     default_connection = connection_or_acquire  # XXX compat
@@ -790,9 +774,9 @@ class Celery(object):
         """For use within a :keyword:`with` statement to get a producer
         from the pool if one is not already provided.
 
-        :keyword producer: If not provided, then a producer will be
-                           acquired from the producer pool.
-
+        Arguments:
+            producer (kombu.Producer): If not provided, a producer
+                will be acquired from the producer pool.
         """
         return FallbackContext(
             producer, self.producer_pool.acquire, block=True,
@@ -878,7 +862,6 @@ class Celery(object):
         """Return a new :class:`~celery.canvas.Signature` bound to this app.
 
         See :meth:`~celery.signature`
-
         """
         kwargs['app'] = self
         return self.canvas.signature(*args, **kwargs)
@@ -926,15 +909,15 @@ class Celery(object):
         provides the default app it should use, e.g.
         ``class Foo: app = None``.
 
-        :param Class: The app-compatible class to subclass.
-        :keyword name: Custom name for the target class.
-        :keyword attribute: Name of the attribute holding the app,
-                            default is 'app'.
-        :keyword reverse: Reverse path to this object used for pickling
-            purposes.  E.g. for ``app.AsyncResult`` use ``"AsyncResult"``.
-        :keyword keep_reduce: If enabled a custom ``__reduce__`` implementation
-           will not be provided.
-
+        Arguments:
+            Class (type): The app-compatible class to subclass.
+            name (str): Custom name for the target class.
+            attribute (str): Name of the attribute holding the app.
+                Default is 'app'.
+            reverse (str): Reverse path to this object used for pickling
+                purposes.  E.g. for ``app.AsyncResult`` use ``"AsyncResult"``.
+            keep_reduce (bool): If enabled a custom ``__reduce__``
+                implementation will not be provided.
         """
         Class = symbol_by_name(Class)
         reverse = reverse if reverse else Class.__name__
@@ -942,10 +925,11 @@ class Celery(object):
         def __reduce__(self):
             return _unpickle_appattr, (reverse, self.__reduce_args__())
 
-        attrs = dict({attribute: self},
-                     __module__=Class.__module__,
-                     __doc__=Class.__doc__,
-                     **kw)
+        attrs = dict(
+            {attribute: self},
+            __module__=Class.__module__,
+            __doc__=Class.__doc__,
+            **kw)
         if not keep_reduce:
             attrs['__reduce__'] = __reduce__
 
@@ -1012,7 +996,6 @@ class Celery(object):
         """:program:`celery beat` scheduler application.
 
         See :class:`~@Beat`.
-
         """
         return self.subclass_with_self('celery.apps.beat:Beat')
 
@@ -1030,7 +1013,6 @@ class Celery(object):
         """Create new result instance.
 
         See :class:`celery.result.AsyncResult`.
-
         """
         return self.subclass_with_self('celery.result:AsyncResult')
 
@@ -1043,7 +1025,6 @@ class Celery(object):
         """Create new group result instance.
 
         See :class:`celery.result.GroupResult`.
-
         """
         return self.subclass_with_self('celery.result:GroupResult')
 
@@ -1052,7 +1033,6 @@ class Celery(object):
         """Broker connection pool: :class:`~@pool`.
 
         This attribute is not related to the workers concurrency pool.
-
         """
         if self._pool is None:
             self._ensure_after_fork()
@@ -1073,7 +1053,6 @@ class Celery(object):
 
         Differs from :data:`current_task` in that it's not affected
         by tasks calling other tasks directly, or eagerly.
-
         """
         return get_current_worker_task()
 
@@ -1137,7 +1116,6 @@ class Celery(object):
         """Task registry.
 
         Accessing this attribute will also finalize the app.
-
         """
         self.finalize(auto=True)
         return self._tasks
@@ -1152,7 +1130,6 @@ class Celery(object):
 
         This is a cached property taking the time zone from the
         :setting:`timezone` setting.
-
         """
         conf = self.conf
         tz = conf.timezone

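Tying the constructor and connection docstrings above together, a minimal usage sketch (app name and broker URL are illustrative):

    from celery import Celery

    app = Celery('proj', broker='amqp://guest@localhost//')

    # connection_for_write() returns a kombu.Connection; used as a
    # context manager it is closed again on exit.
    with app.connection_for_write() as conn:
        conn.ensure_connection(max_retries=3)
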
+ 2 - 9
celery/app/builtins.py

@@ -1,12 +1,7 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.app.builtins``
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Built-in tasks that are always available in all
-    app instances. E.g. :class:`@chord`, :class:`@group`
-    and :class:`@xmap`.
+"""Built-in Tasks.
 
+The built-in tasks are always available in all app instances.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -26,7 +21,6 @@ def add_backend_cleanup_task(app):
     If the configured backend requires periodic cleanup this task is also
     automatically configured to run every day at 4am (requires
     :program:`celery beat` to be running).
-
     """
     @app.task(name='celery.backend_cleanup', shared=False, lazy=False)
     def backend_cleanup():
@@ -49,7 +43,6 @@ def add_unlock_chord_task(app):
     """This task is used by result backends without native chord support.
 
     It joins chords by creating a task chain polling the header for completion.
-
     """
     from celery.canvas import maybe_signature
     from celery.exceptions import ChordError

+ 74 - 62
celery/app/control.py

@@ -1,11 +1,8 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.app.control
-    ~~~~~~~~~~~~~~~~~~~
-
-    Client for worker remote control commands.
-    Server implementation is in :mod:`celery.worker.control`.
+"""Worker Remote Control Client.
 
+Client for worker remote control commands.
+Server implementation is in :mod:`celery.worker.control`.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -146,8 +143,13 @@ class Control(object):
         This will ignore all tasks waiting for execution, and they will
         be deleted from the messaging server.
 
-        :returns: the number of tasks discarded.
+        Arguments:
+            connection (kombu.Connection): Optional specific connection
+                instance to use.  If not provided a connection will
+                be acquired from the connection pool.
 
+        Returns:
+            int: the number of tasks discarded.
         """
         with self.app.connection_or_acquire(connection) as conn:
             return self.app.amqp.TaskConsumer(conn).purge()
@@ -165,14 +167,15 @@ class Control(object):
         If a task is revoked, the workers will ignore the task and
         not execute it after all.
 
-        :param task_id: Id of the task to revoke.
-        :keyword terminate: Also terminate the process currently working
-            on the task (if any).
-        :keyword signal: Name of signal to send to process if terminate.
-            Default is TERM.
-
-        See :meth:`broadcast` for supported keyword arguments.
+        Arguments:
+            task_id (str): Id of the task to revoke.
+            terminate (bool): Also terminate the process currently working
+                on the task (if any).
+            signal (str): Name of signal to send to process if terminate.
+                Default is TERM.
 
+        See Also:
+            :meth:`broadcast` for supported keyword arguments.
         """
         return self.broadcast('revoke', destination=destination,
                               arguments={'task_id': task_id,
@@ -182,10 +185,11 @@ class Control(object):
     def ping(self, destination=None, timeout=1, **kwargs):
         """Ping all (or specific) workers.
 
-        Will return the list of answers.
-
-        See :meth:`broadcast` for supported keyword arguments.
+        Returns:
+            List[Dict]: List of ``{'hostname': reply}`` dictionaries.
 
+        See Also:
+            :meth:`broadcast` for supported keyword arguments.
         """
         return self.broadcast('ping', reply=True, destination=destination,
                               timeout=timeout, **kwargs)
@@ -194,14 +198,15 @@ class Control(object):
         """Tell all (or specific) workers to set a new rate limit
         for task by type.
 
-        :param task_name: Name of task to change rate limit for.
-        :param rate_limit: The rate limit as tasks per second, or a rate limit
-            string (`'100/m'`, etc.
-            see :attr:`celery.task.base.Task.rate_limit` for
-            more information).
-
-        See :meth:`broadcast` for supported keyword arguments.
+        Arguments:
+            task_name (str): Name of task to change rate limit for.
+            rate_limit (int, str): The rate limit as tasks per second,
+                or a rate limit string (`'100/m'`, etc.;
+                see :attr:`celery.task.base.Task.rate_limit` for
+                more information).
 
+        See Also:
+            :meth:`broadcast` for supported keyword arguments.
         """
         return self.broadcast('rate_limit', destination=destination,
                               arguments={'task_name': task_name,
@@ -216,21 +221,21 @@ class Control(object):
         then the exchange/routing key will be set to the same name (
         like automatic queues do).
 
-        .. note::
-
+        Note:
             This command does not respect the default queue/exchange
             options in the configuration.
 
-        :param queue: Name of queue to start consuming from.
-        :keyword exchange: Optional name of exchange.
-        :keyword exchange_type: Type of exchange (defaults to 'direct')
-            command to, when empty broadcast to all workers.
-        :keyword routing_key: Optional routing key.
-        :keyword options: Additional options as supported
-            by :meth:`kombu.entitiy.Queue.from_dict`.
-
-        See :meth:`broadcast` for supported keyword arguments.
-
+        Arguments:
+            queue (str): Name of queue to start consuming from.
+            exchange (str): Optional name of exchange.
+            exchange_type (str): Type of exchange (defaults to 'direct').
+            routing_key (str): Optional routing key.
+            options (Dict): Additional options as supported
+                by :meth:`kombu.entity.Queue.from_dict`.
+
+        See Also:
+            :meth:`broadcast` for supported keyword arguments.
         """
         return self.broadcast(
             'add_consumer',
@@ -243,8 +248,8 @@ class Control(object):
     def cancel_consumer(self, queue, **kwargs):
         """Tell all (or specific) workers to stop consuming from ``queue``.
 
-        Supports the same keyword arguments as :meth:`broadcast`.
-
+        See Also:
+            Supports the same arguments as :meth:`broadcast`.
         """
         return self.broadcast(
             'cancel_consumer', arguments={'queue': queue}, **kwargs
@@ -254,12 +259,11 @@ class Control(object):
         """Tell all (or specific) workers to set time limits for
         a task by type.
 
-        :param task_name: Name of task to change time limits for.
-        :keyword soft: New soft time limit (in seconds).
-        :keyword hard: New hard time limit (in seconds).
-
-        Any additional keyword arguments are passed on to :meth:`broadcast`.
-
+        Arguments:
+            task_name (str): Name of task to change time limits for.
+            soft (float): New soft time limit (in seconds).
+            hard (float): New hard time limit (in seconds).
+            **kwargs (Any): arguments passed on to :meth:`broadcast`.
         """
         return self.broadcast(
             'time_limit',
@@ -267,26 +271,34 @@ class Control(object):
                        'hard': hard, 'soft': soft}, **kwargs)
 
     def enable_events(self, destination=None, **kwargs):
-        """Tell all (or specific) workers to enable events."""
+        """Tell all (or specific) workers to enable events.
+
+        See Also:
+            Supports the same arguments as :meth:`broadcast`.
+        """
         return self.broadcast('enable_events', {}, destination, **kwargs)
 
     def disable_events(self, destination=None, **kwargs):
-        """Tell all (or specific) workers to disable events."""
+        """Tell all (or specific) workers to disable events.
+
+        See Also:
+            Supports the same arguments as :meth:`broadcast`.
+        """
         return self.broadcast('disable_events', {}, destination, **kwargs)
 
     def pool_grow(self, n=1, destination=None, **kwargs):
         """Tell all (or specific) workers to grow the pool by ``n``.
 
-        Supports the same arguments as :meth:`broadcast`.
-
+        See Also:
+            Supports the same arguments as :meth:`broadcast`.
         """
         return self.broadcast('pool_grow', {'n': n}, destination, **kwargs)
 
     def pool_shrink(self, n=1, destination=None, **kwargs):
         """Tell all (or specific) workers to shrink the pool by ``n``.
 
-        Supports the same arguments as :meth:`broadcast`.
-
+        See Also:
+            Supports the same arguments as :meth:`broadcast`.
         """
         return self.broadcast('pool_shrink', {'n': n}, destination, **kwargs)
 
@@ -295,18 +307,18 @@ class Control(object):
                   callback=None, channel=None, **extra_kwargs):
         """Broadcast a control command to the celery workers.
 
-        :param command: Name of command to send.
-        :param arguments: Keyword arguments for the command.
-        :keyword destination: If set, a list of the hosts to send the
-            command to, when empty broadcast to all workers.
-        :keyword connection: Custom broker connection to use, if not set,
-            a connection will be established automatically.
-        :keyword reply: Wait for and return the reply.
-        :keyword timeout: Timeout in seconds to wait for the reply.
-        :keyword limit: Limit number of replies.
-        :keyword callback: Callback called immediately for each reply
-            received.
-
+        Arguments:
+            command (str): Name of command to send.
+            arguments (Dict): Keyword arguments for the command.
+            destination (List): If set, a list of the hosts to send the
+                command to; when empty, broadcast to all workers.
+            connection (kombu.Connection): Custom broker connection to use,
+                if not set, a connection will be acquired from the pool.
+            reply (bool): Wait for and return the reply.
+            timeout (float): Timeout in seconds to wait for the reply.
+            limit (int): Limit number of replies.
+            callback (Callable): Callback called immediately for
+                each reply received.
         """
         with self.app.connection_or_acquire(connection) as conn:
             arguments = dict(arguments or {}, **extra_kwargs)

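The remote-control client methods documented above in use; a hedged sketch (the task name and id are placeholders):

    # Assumes a configured `app` instance, as in the earlier sketch.
    app.control.ping(timeout=0.5)                 # -> [{'worker1@host': {'ok': 'pong'}}, ...]
    app.control.rate_limit('tasks.add', '100/m')  # per-task-type rate limit
    app.control.revoke('<task-id>', terminate=True)
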
+ 1 - 7
celery/app/defaults.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.app.defaults
-    ~~~~~~~~~~~~~~~~~~~
-
-    Configuration introspection and defaults.
-
-"""
+"""Configuration introspection and defaults."""
 from __future__ import absolute_import, unicode_literals
 
 import sys

+ 9 - 12
celery/app/log.py

@@ -1,14 +1,11 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.app.log
-    ~~~~~~~~~~~~~~
-
-    The Celery instances logging section: ``Celery.log``.
+"""Logging configuration.
 
-    Sets up logging for the worker and other programs,
-    redirects standard outs, colors log output, patches logging
-    related compatibility fixes, and so on.
+The Celery instance's logging section: ``Celery.log``.
 
+Sets up logging for the worker and other programs,
+redirects standard outs, colors log output, patches logging
+related compatibility fixes, and so on.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -162,7 +159,6 @@ class Logging(object):
         If `logfile` is not specified, then `sys.stderr` is used.
 
         Will return the base task logger object.
-
         """
         loglevel = mlevel(loglevel or self.loglevel)
         format = format or self.task_format
@@ -188,9 +184,10 @@ class Logging(object):
         """Redirect :class:`sys.stdout` and :class:`sys.stderr` to a
         logging instance.
 
-        :param logger: The :class:`logging.Logger` instance to redirect to.
-        :param loglevel: The loglevel redirected messages will be logged as.
-
+        Arguments:
+            logger (logging.Logger): Logger instance to redirect to.
+            loglevel (int, str): The loglevel redirected messages
+                will be logged as.
         """
         proxy = LoggingProxy(logger, loglevel)
         if stdout:

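A hedged example of the ``redirect_stdouts_to_logger`` API documented above (reusing the hypothetical ``app`` from the earlier sketch):

    import logging

    logger = logging.getLogger('celery.redirected')
    app.log.redirect_stdouts_to_logger(logger, loglevel=logging.WARNING)
    print('hello')  # now emitted through `logger` at WARNING level
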
+ 6 - 13
celery/app/registry.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.app.registry
-    ~~~~~~~~~~~~~~~~~~~
-
-    Registry of available tasks.
-
-"""
+"""Registry of available tasks."""
 from __future__ import absolute_import, unicode_literals
 
 import inspect
@@ -30,19 +24,18 @@ class TaskRegistry(dict):
 
         The task will be automatically instantiated if not already an
         instance.
-
         """
         self[task.name] = inspect.isclass(task) and task() or task
 
     def unregister(self, name):
         """Unregister task by name.
 
-        :param name: name of the task to unregister, or a
-            :class:`celery.task.base.Task` with a valid `name` attribute.
-
-        :raises celery.exceptions.NotRegistered: if the task has not
-            been registered.
+        Arguments:
+            name (str): name of the task to unregister, or a
+                :class:`celery.task.base.Task` with a valid `name` attribute.
 
+        Raises:
+            celery.exceptions.NotRegistered: if the task is not registered.
         """
         try:
             self.pop(getattr(name, 'name', name))

+ 2 - 6
celery/app/routes.py

@@ -1,11 +1,7 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.routes
-    ~~~~~~~~~~~~~
-
-    Contains utilities for working with task routers,
-    (:setting:`task_routes`).
+"""Task Routing.
 
+Contains utilities for working with task routers (:setting:`task_routes`).
 """
 from __future__ import absolute_import, unicode_literals
 

+ 220 - 242
celery/app/task.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.app.task
-    ~~~~~~~~~~~~~~~
-
-    Task Implementation: Task request context, and the base task class.
-
-"""
+"""Task implementation: request context and the task base class."""
 from __future__ import absolute_import, unicode_literals
 
 import sys
@@ -67,7 +61,6 @@ def _reprtask(task, fmt=None, flags=None):
 
 @python_2_unicode_compatible
 class Context(object):
-    # Default context
     logfile = None
     loglevel = None
     hostname = None
@@ -149,7 +142,6 @@ class Task(object):
     When called tasks apply the :meth:`run` method.  This method must
     be defined by all tasks (that is unless the :meth:`__call__` method
     is overridden).
-
     """
     __trace__ = None
     __v2_compat__ = False  # set by old base in celery.task.base
@@ -385,11 +377,11 @@ class Task(object):
 
         Does not support the extra options enabled by :meth:`apply_async`.
 
-        :param \*args: positional arguments passed on to the task.
-        :param \*\*kwargs: keyword arguments passed on to the task.
-
-        :returns: :class:`celery.result.AsyncResult`
-
+        Arguments:
+            *args (Any): Positional arguments passed on to the task.
+            **kwargs (Any): Keyword arguments passed on to the task.
+        Returns:
+            celery.result.AsyncResult: Future promise.
         """
         return self.apply_async(args, kwargs)
 
@@ -397,103 +389,91 @@ class Task(object):
                     link=None, link_error=None, shadow=None, **options):
         """Apply tasks asynchronously by sending a message.
 
-        :keyword args: The positional arguments to pass on to the
-                       task (a :class:`list` or :class:`tuple`).
-
-        :keyword kwargs: The keyword arguments to pass on to the
-                         task (a :class:`dict`)
-
-        :keyword countdown: Number of seconds into the future that the
-                            task should execute. Defaults to immediate
-                            execution.
-
-        :keyword eta: A :class:`~datetime.datetime` object describing
-                      the absolute time and date of when the task should
-                      be executed.  May not be specified if `countdown`
-                      is also supplied.
-
-        :keyword expires: Either a :class:`int`, describing the number of
-                          seconds, or a :class:`~datetime.datetime` object
-                          that describes the absolute time and date of when
-                          the task should expire.  The task will not be
-                          executed after the expiration time.
-
-        :keyword shadow: Override task name used in logs/monitoring
-            (default from :meth:`shadow_name`).
-
-        :keyword connection: Re-use existing broker connection instead
-                             of establishing a new one.
-
-        :keyword retry: If enabled sending of the task message will be retried
-                        in the event of connection loss or failure.  Default
-                        is taken from the :setting:`task_publish_retry`
-                        setting.  Note that you need to handle the
-                        producer/connection manually for this to work.
-
-        :keyword retry_policy:  Override the retry policy used.  See the
-                                :setting:`task_publish_retry_policy`
-                                setting.
-
-        :keyword routing_key: Custom routing key used to route the task to a
-                              worker server. If in combination with a
-                              ``queue`` argument only used to specify custom
-                              routing keys to topic exchanges.
-
-        :keyword queue: The queue to route the task to.  This must be a key
-                        present in :setting:`task_queues`, or
-                        :setting:`task_create_missing_queues` must be
-                        enabled.  See :ref:`guide-routing` for more
-                        information.
-
-        :keyword exchange: Named custom exchange to send the task to.
-                           Usually not used in combination with the ``queue``
-                           argument.
-
-        :keyword priority: The task priority, a number between 0 and 9.
-                           Defaults to the :attr:`priority` attribute.
-
-        :keyword serializer: A string identifying the default
-                             serialization method to use.  Can be `pickle`,
-                             `json`, `yaml`, `msgpack` or any custom
-                             serialization method that has been registered
-                             with :mod:`kombu.serialization.registry`.
-                             Defaults to the :attr:`serializer` attribute.
-
-        :keyword compression: A string identifying the compression method
-                              to use.  Can be one of ``zlib``, ``bzip2``,
-                              or any custom compression methods registered with
-                              :func:`kombu.compression.register`. Defaults to
-                              the :setting:`task_compression`
-                              setting.
-        :keyword link: A single, or a list of tasks to apply if the
-                       task exits successfully.
-        :keyword link_error: A single, or a list of tasks to apply
-                      if an error occurs while executing the task.
-
-        :keyword producer: :class:`kombu.Producer` instance to use.
-
-        :keyword add_to_parent: If set to True (default) and the task
-            is applied while executing another task, then the result
-            will be appended to the parent tasks ``request.children``
-            attribute.  Trailing can also be disabled by default using the
-            :attr:`trail` attribute
-
-        :keyword publisher: Deprecated alias to ``producer``.
-
-        :keyword headers: Message headers to be sent in the
-            task (a :class:`dict`)
-
-        :rtype :class:`celery.result.AsyncResult`: if
-            :setting:`task_always_eager` is not set, otherwise
-            :class:`celery.result.EagerResult`:
-
-        Also supports all keyword arguments supported by
-        :meth:`kombu.Producer.publish`.
-
-        .. note::
-            If the :setting:`task_always_eager` setting is set, it will
-            be replaced by a local :func:`apply` call instead.
+        Arguments:
+            args (Tuple): The positional arguments to pass on to the task.
+
+            kwargs (Dict): The keyword arguments to pass on to the task.
+
+            countdown (float): Number of seconds into the future that the
+                task should execute.  Defaults to immediate execution.
+
+            eta (~datetime.datetime): Absolute time and date of when the task
+                should be executed.  May not be specified if `countdown`
+                is also supplied.
+
+            expires (float, ~datetime.datetime): Datetime or
+                seconds in the future for when the task should expire.
+                The task will not be executed after the expiration time.
+
+            shadow (str): Override task name used in logs/monitoring.
+                Default is retrieved from :meth:`shadow_name`.
+
+            connection (kombu.Connection): Re-use existing broker connection
+                instead of acquiring one from the connection pool.
+
+            retry (bool): If enabled sending of the task message will be
+                retried in the event of connection loss or failure.
+                Default is taken from the :setting:`task_publish_retry`
+                setting.  Note that you need to handle the
+                producer/connection manually for this to work.
+
+            retry_policy (Mapping): Override the retry policy used.
+                See the :setting:`task_publish_retry_policy` setting.
+
+            queue (str, kombu.Queue): The queue to route the task to.
+                This must be a key present in :setting:`task_queues`, or
+                :setting:`task_create_missing_queues` must be
+                enabled.  See :ref:`guide-routing` for more
+                information.
+
+            exchange (str, kombu.Exchange): Named custom exchange to send the
+                task to.  Usually not used in combination with the ``queue``
+                argument.
+
+            routing_key (str): Custom routing key used to route the task to a
+                worker server. If in combination with a ``queue`` argument
+                only used to specify custom routing keys to topic exchanges.
+
+            priority (int): The task priority, a number between 0 and 9.
+                Defaults to the :attr:`priority` attribute.
+
+            serializer (str): Serialization method to use.
+                Can be `pickle`, `json`, `yaml`, `msgpack` or any custom
+                serialization method that has been registered
+                with :mod:`kombu.serialization.registry`.
+                Defaults to the :attr:`serializer` attribute.
+
+            compression (str): Optional compression method
+                to use.  Can be one of ``zlib``, ``bzip2``,
+                or any custom compression methods registered with
+                :func:`kombu.compression.register`. Defaults to
+                the :setting:`task_compression` setting.
+
+            link (~@Signature): A single, or a list of tasks signatures
+                to apply if the task returns successfully.
+
+            link_error (~@Signature): A single, or a list of task signatures
+                to apply if an error occurs while executing the task.
+
+            producer (kombu.Producer): custom producer to use when publishing
+                the task.
+
+            add_to_parent (bool): If set to True (default) and the task
+                is applied while executing another task, then the result
+                will be appended to the parent task's ``request.children``
+                attribute.  Trailing can also be disabled by default using the
+                :attr:`trail` attribute.
 
+            publisher (kombu.Producer): Deprecated alias to ``producer``.
+
+            headers (Dict): Message headers to be included in the message.
+
+        Returns:
+            ~@AsyncResult: Future promise.
+
+        Note:
+            Also supports all keyword arguments supported by
+            :meth:`kombu.Producer.publish`.
         """
         try:
             check_arguments = self.__header__
@@ -524,23 +504,22 @@ class Task(object):
     def shadow_name(self, args, kwargs, options):
         """Override for custom task name in worker logs/monitoring.
 
-        :param args: Task positional arguments.
-        :param kwargs: Task keyword arguments.
-        :param options: Task execution options.
+        Example:
+            .. code-block:: python
 
-        **Example**:
+                from celery.utils.imports import qualname
 
-        .. code-block:: python
+                def shadow_name(task, args, kwargs, options):
+                    return qualname(args[0])
 
-            from celery.utils.imports import qualname
-
-            def shadow_name(task, args, kwargs, options):
-                return qualname(args[0])
-
-            @app.task(shadow_name=shadow_name, serializer='pickle')
-            def apply_function_async(fun, *args, **kwargs):
-                return fun(*args, **kwargs)
+                @app.task(shadow_name=shadow_name, serializer='pickle')
+                def apply_function_async(fun, *args, **kwargs):
+                    return fun(*args, **kwargs)
 
+        Arguments:
+            args (Tuple): Task positional arguments.
+            kwargs (Dict): Task keyword arguments.
+            options (Dict): Task execution options.
         """
         pass
 
@@ -562,48 +541,7 @@ class Task(object):
               eta=None, countdown=None, max_retries=None, **options):
         """Retry the task.
 
-        :param args: Positional arguments to retry with.
-        :param kwargs: Keyword arguments to retry with.
-        :keyword exc: Custom exception to report when the max restart
-            limit has been exceeded (default:
-            :exc:`~@MaxRetriesExceededError`).
-
-            If this argument is set and retry is called while
-            an exception was raised (``sys.exc_info()`` is set)
-            it will attempt to re-raise the current exception.
-
-            If no exception was raised it will raise the ``exc``
-            argument provided.
-        :keyword countdown: Time in seconds to delay the retry for.
-        :keyword eta: Explicit time and date to run the retry at
-                      (must be a :class:`~datetime.datetime` instance).
-        :keyword max_retries: If set, overrides the default retry limit for
-            this execution. Changes to this parameter do not propagate to
-            subsequent task retry attempts. A value of :const:`None`, means
-            "use the default", so if you want infinite retries you would
-            have to set the :attr:`max_retries` attribute of the task to
-            :const:`None` first.
-        :keyword time_limit: If set, overrides the default time limit.
-        :keyword soft_time_limit: If set, overrides the default soft
-                                  time limit.
-        :keyword \*\*options: Any extra options to pass on to
-                              :meth:`apply_async`.
-        :keyword throw: If this is :const:`False`, do not raise the
-                        :exc:`~@Retry` exception,
-                        that tells the worker to mark the task as being
-                        retried.  Note that this means the task will be
-                        marked as failed if the task raises an exception,
-                        or successful if it returns.
-
-        :raises celery.exceptions.Retry: To tell the worker that
-            the task has been re-sent for retry. This always happens,
-            unless the `throw` keyword argument has been explicitly set
-            to :const:`False`, and is considered normal operation.
-
-        **Example**
-
-        .. code-block:: pycon
-
+        Example:
             >>> from imaginary_twitter_lib import Twitter
             >>> from proj.celery import app
 
@@ -616,10 +554,49 @@ class Task(object):
             ...         # Retry in 5 minutes.
             ...         raise self.retry(countdown=60 * 5, exc=exc)
 
-        Although the task will never return above as `retry` raises an
-        exception to notify the worker, we use `raise` in front of the retry
-        to convey that the rest of the block will not be executed.
-
+        Note:
+            Although the task will never return above, as `retry` raises an
+            exception to notify the worker, we use `raise` in front of the
+            retry to convey that the rest of the block will not be executed.
+
+        Arguments:
+            args (Tuple): Positional arguments to retry with.
+            kwargs (Dict): Keyword arguments to retry with.
+            exc (Exception): Custom exception to report when the max retry
+                limit has been exceeded (default:
+                :exc:`~@MaxRetriesExceededError`).
+
+                If this argument is set and retry is called while
+                an exception was raised (``sys.exc_info()`` is set)
+                it will attempt to re-raise the current exception.
+
+                If no exception was raised it will raise the ``exc``
+                argument provided.
+            countdown (float): Time in seconds to delay the retry for.
+            eta (~datetime.datetime): Explicit time and date to run the
+                retry at.
+            max_retries (int): If set, overrides the default retry limit for
+                this execution. Changes to this parameter do not propagate to
+                subsequent task retry attempts. A value of :const:`None` means
+                "use the default", so if you want infinite retries you would
+                have to set the :attr:`max_retries` attribute of the task to
+                :const:`None` first.
+            time_limit (int): If set, overrides the default time limit.
+            soft_time_limit (int): If set, overrides the default soft
+                time limit.
+            throw (bool): If this is :const:`False`, do not raise the
+                :exc:`~@Retry` exception, that tells the worker to mark
+                the task as being retried.  Note that this means the task
+                will be marked as failed if the task raises an exception,
+                or successful if it returns after the retry call.
+            **options (Any): Extra options to pass on to :meth:`apply_async`.
+
+        Raises:
+            celery.exceptions.Retry:
+                To tell the worker that the task has been re-sent for retry.
+                This always happens, unless the `throw` keyword argument
+                has been explicitly set to :const:`False`, and is considered
+                normal operation.
         """
         request = self.request
         retries = request.retries + 1
@@ -670,17 +647,19 @@ class Task(object):
         return ret
 
     def apply(self, args=None, kwargs=None,
-              link=None, link_error=None, **options):
+              link=None, link_error=None,
+              task_id=None, retries=None, throw=None,
+              logfile=None, loglevel=None, headers=None, **options):
         """Execute this task locally, by blocking until the task returns.
 
-        :param args: positional arguments passed on to the task.
-        :param kwargs: keyword arguments passed on to the task.
-        :keyword throw: Re-raise task exceptions.  Defaults to
-                        the :setting:`task_eager_propagates`
-                        setting.
-
-        :rtype :class:`celery.result.EagerResult`:
+        Arguments:
+            args (Tuple): Positional arguments passed on to the task.
+            kwargs (Dict): Keyword arguments passed on to the task.
+            throw (bool): Re-raise task exceptions.
+                Defaults to the :setting:`task_eager_propagates` setting.
 
+        Returns:
+            celery.result.EagerResult: Pre-evaluated result.
         """
         # trace imports Task, so need to import inline.
         from celery.app.trace import build_tracer
@@ -691,22 +670,25 @@ class Task(object):
         if self.__self__ is not None:
             args = (self.__self__,) + tuple(args)
         kwargs = kwargs or {}
-        task_id = options.get('task_id') or uuid()
-        retries = options.get('retries', 0)
-        throw = app.either('task_eager_propagates', options.pop('throw', None))
+        task_id = task_id or uuid()
+        retries = retries or 0
+        if throw is None:
+            throw = app.conf.task_eager_propagates
 
         # Make sure we get the task instance, not class.
         task = app._tasks[self.name]
 
-        request = {'id': task_id,
-                   'retries': retries,
-                   'is_eager': True,
-                   'logfile': options.get('logfile'),
-                   'loglevel': options.get('loglevel', 0),
-                   'callbacks': maybe_list(link),
-                   'errbacks': maybe_list(link_error),
-                   'headers': options.get('headers'),
-                   'delivery_info': {'is_eager': True}}
+        request = {
+            'id': task_id,
+            'retries': retries,
+            'is_eager': True,
+            'logfile': logfile,
+            'loglevel': loglevel or 0,
+            'callbacks': maybe_list(link),
+            'errbacks': maybe_list(link_error),
+            'headers': headers,
+            'delivery_info': {'is_eager': True},
+        }
         tb = None
         tracer = build_tracer(
             task.name, task, eager=True,
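
A short sketch of eager execution via :meth:`apply`, assuming a
hypothetical ``add`` task:

.. code-block:: python

    res = add.apply(args=(2, 2))  # runs in the current process, blocking
    assert res.get() == 4         # EagerResult is already evaluated
    assert res.successful()
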
@@ -722,8 +704,8 @@ class Task(object):
     def AsyncResult(self, task_id, **kwargs):
         """Get AsyncResult instance for this kind of task.
 
-        :param task_id: Task id to get result for.
-
+        Arguments:
+            task_id (str): Task id to get result for.
         """
         return self._get_app().AsyncResult(task_id, backend=self.backend,
                                            task_name=self.name, **kwargs)
@@ -768,14 +750,15 @@ class Task(object):
         """Replace the current task, with a new task inheriting the
         same task id.
 
-        :param sig: :class:`@signature`
-
         .. versionadded:: 4.0
 
-        Note: This will raise :exc:`~@Ignore`, so the best practice
-        is to always use ``raise self.replace(...)`` to convey
-        to the reader that the task will not continue after being replaced.
+        Arguments:
+            sig (~@Signature): Signature to replace with.
 
+        Raises:
+            ~@Ignore: This is always raised, so the best practice
+                is to always use ``raise self.replace(...)`` to convey
+                to the reader that the task will not continue after
+                being replaced.
         """
         chord = self.request.chord
         if 'chord' in sig.options:
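
A sketch of the documented ``raise self.replace(...)`` pattern, where
``fetch`` is a hypothetical task:

.. code-block:: python

    from celery import group

    @app.task(bind=True)
    def dispatch(self, name):
        # The group inherits this task's id; raising conveys that
        # nothing below this line will execute.
        raise self.replace(group(fetch.s(name), fetch.s(name.upper())))
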
@@ -807,14 +790,14 @@ class Task(object):
     def add_to_chord(self, sig, lazy=False):
         """Add signature to the chord the current task is a member of.
 
-        :param sig: Signature to extend chord with.
-        :param lazy: If enabled the new task will not actually be called,
-                      and ``sig.delay()`` must be called manually.
-
         .. versionadded:: 4.0
 
         Currently only supported by the Redis result backend.
 
+        Arguments:
+            sig (~@Signature): Signature to extend chord with.
+            lazy (bool): If enabled the new task will not actually be called,
+                and ``sig.delay()`` must be called manually.
         """
         if not self.request.chord:
             raise ValueError('Current task is not member of any chord')
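
A sketch of growing a chord from within one of its member tasks
(assuming the Redis result backend, per the note above):

.. code-block:: python

    @app.task(bind=True)
    def expand(self, n):
        if n > 1:
            # Queue another member into the enclosing chord.
            self.add_to_chord(expand.s(n - 1), lazy=False)
        return n
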
@@ -827,13 +810,11 @@ class Task(object):
     def update_state(self, task_id=None, state=None, meta=None):
         """Update task state.
 
-        :keyword task_id: Id of the task to update, defaults to the
-                          id of the current task
-        :keyword state: New state (:class:`str`).
-        :keyword meta: State meta-data (:class:`dict`).
-
-
-
+        Arguments:
+            task_id (str): Id of the task to update.
+                Defaults to the id of the current task.
+            state (str): New state.
+            meta (Dict): State meta-data.
         """
         if task_id is None:
             task_id = self.request.id
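
A sketch of reporting a custom progress state from a bound task:

.. code-block:: python

    @app.task(bind=True)
    def crawl(self, urls):
        for i, url in enumerate(urls):
            ...  # fetch the url here
            self.update_state(
                state='PROGRESS',
                meta={'current': i + 1, 'total': len(urls)},
            )
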
@@ -844,13 +825,14 @@ class Task(object):
 
         Run by the worker if the task executes successfully.
 
-        :param retval: The return value of the task.
-        :param task_id: Unique id of the executed task.
-        :param args: Original arguments for the executed task.
-        :param kwargs: Original keyword arguments for the executed task.
-
-        The return value of this handler is ignored.
+        Arguments:
+            retval (Any): The return value of the task.
+            task_id (str): Unique id of the executed task.
+            args (Tuple): Original arguments for the executed task.
+            kwargs (Dict): Original keyword arguments for the executed task.
 
+        Returns:
+            None: The return value of this handler is ignored.
         """
         pass
 
@@ -859,16 +841,15 @@ class Task(object):
 
         This is run by the worker when the task is to be retried.
 
-        :param exc: The exception sent to :meth:`retry`.
-        :param task_id: Unique id of the retried task.
-        :param args: Original arguments for the retried task.
-        :param kwargs: Original keyword arguments for the retried task.
-
-        :keyword einfo: :class:`~billiard.einfo.ExceptionInfo`
-                        instance, containing the traceback.
-
-        The return value of this handler is ignored.
+        Arguments:
+            exc (Exception): The exception sent to :meth:`retry`.
+            task_id (str): Unique id of the retried task.
+            args (Tuple): Original arguments for the retried task.
+            kwargs (Dict): Original keyword arguments for the retried task.
+            einfo (~billiard.einfo.ExceptionInfo): Exception information.
 
+        Returns:
+            None: The return value of this handler is ignored.
         """
         pass
 
@@ -877,34 +858,31 @@ class Task(object):
 
         This is run by the worker when the task fails.
 
-        :param exc: The exception raised by the task.
-        :param task_id: Unique id of the failed task.
-        :param args: Original arguments for the task that failed.
-        :param kwargs: Original keyword arguments for the task
-                       that failed.
-
-        :keyword einfo: :class:`~billiard.einfo.ExceptionInfo`
-                        instance, containing the traceback.
-
-        The return value of this handler is ignored.
+        Arguments:
+            exc (Exception): The exception raised by the task.
+            task_id (str): Unique id of the failed task.
+            args (Tuple): Original arguments for the task that failed.
+            kwargs (Dict): Original keyword arguments for the task that failed.
+            einfo (~billiard.einfo.ExceptionInfo): Exception information.
 
+        Returns:
+            None: The return value of this handler is ignored.
         """
         pass
 
     def after_return(self, status, retval, task_id, args, kwargs, einfo):
         """Handler called after the task returns.
 
-        :param status: Current task state.
-        :param retval: Task return value/exception.
-        :param task_id: Unique id of the task.
-        :param args: Original arguments for the task.
-        :param kwargs: Original keyword arguments for the task.
-
-        :keyword einfo: :class:`~billiard.einfo.ExceptionInfo`
-                        instance, containing the traceback (if any).
-
-        The return value of this handler is ignored.
+        Arguments:
+            status (str): Current task state.
+            retval (Any): Task return value/exception.
+            task_id (str): Unique id of the task.
+            args (Tuple): Original arguments for the task.
+            kwargs (Dict): Original keyword arguments for the task.
+            einfo (~billiard.einfo.ExceptionInfo): Exception information.
 
+        Returns:
+            None: The return value of this handler is ignored.
         """
         pass
 

+ 7 - 8
celery/app/trace.py

@@ -1,11 +1,8 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.app.trace
-    ~~~~~~~~~~~~~~~~
-
-    This module defines how the task execution is traced:
-    errors are recorded, handlers are applied and so on.
+"""Trace task execution.
 
+This module defines how the task execution is traced:
+errors are recorded, handlers are applied and so on.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -43,8 +40,10 @@ from celery.utils.serialization import (
 )
 from celery.utils.text import truncate
 
-__all__ = ['TraceInfo', 'build_tracer', 'trace_task',
-           'setup_worker_optimizations', 'reset_worker_optimizations']
+__all__ = [
+    'TraceInfo', 'build_tracer', 'trace_task',
+    'setup_worker_optimizations', 'reset_worker_optimizations',
+]
 
 logger = get_logger(__name__)
 info = logger.info

+ 12 - 17
celery/app/utils.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.app.utils
-    ~~~~~~~~~~~~~~~~
-
-    App utilities: Compat settings, bug-report tool, pickling apps.
-
-"""
+"""App utilities: Compat settings, bug-report tool, pickling apps."""
 from __future__ import absolute_import, unicode_literals
 
 import os
@@ -30,8 +24,10 @@ from .defaults import (
     DEFAULTS, SETTING_KEYS, find,
 )
 
-__all__ = ['Settings', 'appstr', 'bugreport',
-           'filter_hidden_settings', 'find_app']
+__all__ = [
+    'Settings', 'appstr', 'bugreport',
+    'filter_hidden_settings', 'find_app',
+]
 
 #: Format used to generate bug-report information.
 BUGREPORT_INFO = """
@@ -125,16 +121,17 @@ class Settings(ConfigurationView):
     def find_option(self, name, namespace=''):
         """Search for option by name.
 
-        Will return ``(namespace, key, type)`` tuple, e.g.::
-
+        Example:
             >>> from proj.celery import app
             >>> app.conf.find_option('disable_rate_limits')
             ('worker', 'prefetch_multiplier',
              <Option: type->bool default->False>)
 
-        :param name: Name of option, cannot be partial.
-        :keyword namespace: Preferred name-space (``None`` by default).
-
+        Arguments:
+            name (str): Name of option, cannot be partial.
+            namespace (str): Preferred name-space (``None`` by default).
+
+        Returns:
+            Tuple: of ``(namespace, key, type)``.
         """
         return find(name, namespace)
 
@@ -145,12 +142,10 @@ class Settings(ConfigurationView):
     def get_by_parts(self, *parts):
         """Return the current value for setting specified as a path.
 
-        Example::
-
+        Example:
             >>> from proj.celery import app
             >>> app.conf.get_by_parts('worker', 'disable_rate_limits')
             False
-
         """
         return self['_'.join(part for part in parts if part)]
 

+ 7 - 14
celery/apps/beat.py

@@ -1,14 +1,9 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.apps.beat
-    ~~~~~~~~~~~~~~~~
-
-    This module is the 'program-version' of :mod:`celery.beat`.
-
-    It does everything necessary to run that module
-    as an actual application, like installing signal handlers
-    and so on.
+"""This module is the 'program-version' of :mod:`celery.beat`.
 
+It does everything necessary to run that module
+as an actual application, like installing signal handlers
+and so on.
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
@@ -41,6 +36,7 @@ logger = get_logger('celery.beat')
 
 
 class Beat(object):
+
     Service = beat.Service
     app = None
 
@@ -49,7 +45,6 @@ class Beat(object):
                  loglevel='WARN', logfile=None, schedule=None,
                  scheduler_cls=None, redirect_stdouts=None,
                  redirect_stdouts_level=None, **kwargs):
-        """Starts the beat task scheduler."""
         self.app = app = app or self.app
         either = self.app.either
         self.loglevel = loglevel
@@ -57,11 +52,9 @@ class Beat(object):
         self.schedule = either('beat_schedule_filename', schedule)
         self.scheduler_cls = either('beat_scheduler', scheduler_cls)
         self.redirect_stdouts = either(
-            'worker_redirect_stdouts', redirect_stdouts,
-        )
+            'worker_redirect_stdouts', redirect_stdouts)
         self.redirect_stdouts_level = either(
-            'worker_redirect_stdouts_level', redirect_stdouts_level,
-        )
+            'worker_redirect_stdouts_level', redirect_stdouts_level)
 
         self.max_interval = max_interval
         self.socket_timeout = socket_timeout

+ 6 - 13
celery/apps/worker.py

@@ -1,14 +1,9 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.apps.worker
-    ~~~~~~~~~~~~~~~~~~
-
-    This module is the 'program-version' of :mod:`celery.worker`.
-
-    It does everything necessary to run that module
-    as an actual application, like installing signal handlers,
-    platform tweaks, and so on.
+"""This module is the 'program-version' of :mod:`celery.worker`.
 
+It does everything necessary to run that module
+as an actual application, like installing signal handlers,
+platform tweaks, and so on.
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
@@ -104,11 +99,9 @@ class Worker(WorkController):
                       redirect_stdouts=None, redirect_stdouts_level=None,
                       **kwargs):
         self.redirect_stdouts = self.app.either(
-            'worker_redirect_stdouts', redirect_stdouts,
-        )
+            'worker_redirect_stdouts', redirect_stdouts)
         self.redirect_stdouts_level = self.app.either(
-            'worker_redirect_stdouts_level', redirect_stdouts_level,
-        )
+            'worker_redirect_stdouts_level', redirect_stdouts_level)
         super(Worker, self).setup_defaults(**kwargs)
         self.purge = purge
         self.no_color = no_color

+ 1 - 7
celery/backends/__init__.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.backends
-    ~~~~~~~~~~~~~~~
-
-    Backend abstract factory (...did I just say that?) and alias definitions.
-
-"""
+"""Backend abstract factory (...did I just say that?) and alias definitions."""
 from __future__ import absolute_import, unicode_literals
 
 import sys

+ 2 - 8
celery/backends/amqp.py

@@ -1,12 +1,7 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.backends.amqp``
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    The AMQP result backend.
-
-    This backend publishes results as messages.
+"""The AMQP result backend (DEPRECATED)
 
+This backend publishes results as messages, one queue per task(!)
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -25,7 +20,6 @@ def repair_uuid(s):
 
 
 class AMQPBackend(BaseRPCBackend):
-    """Publishes results by sending messages."""
 
     def __init__(self, *args, **kwargs):
         deprecated.warn(

+ 1 - 7
celery/backends/async.py

@@ -1,10 +1,4 @@
-"""
-    ``celery.backends.async``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Async backend support utilities.
-
-"""
+"""Async I/O backend support utilities."""
 from __future__ import absolute_import, unicode_literals
 
 import socket

+ 10 - 15
celery/backends/base.py

@@ -1,15 +1,10 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.backends.base
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Result backend base classes.
-
-    - :class:`BaseBackend` defines the interface.
+"""Result backend base classes.
 
-    - :class:`KeyValueStoreBackend` is a common base class
-      using K/V semantics like _get and _put.
+- :class:`BaseBackend` defines the interface.
 
+- :class:`KeyValueStoreBackend` is a common base class
+  using K/V semantics like ``_get`` and ``_put``.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -72,6 +67,7 @@ class _nulldict(dict):
 
 
 class Backend(object):
+
     READY_STATES = states.READY_STATES
     UNREADY_STATES = states.UNREADY_STATES
     EXCEPTION_STATES = states.EXCEPTION_STATES
@@ -115,8 +111,7 @@ class Backend(object):
 
         self.expires = self.prepare_expires(expires, expires_type)
         self.accept = prepare_accept_content(
-            conf.accept_content if accept is None else accept,
-        )
+            conf.accept_content if accept is None else accept)
         self._pending_results = pending_results_t({}, WeakValueDictionary())
         self._pending_messages = BufferMap(MESSAGE_BUFFER_MAX)
         self.url = url
@@ -444,10 +439,10 @@ class SyncBackendMixin(object):
         If the task raises an exception, this exception
         will be re-raised by :func:`wait_for`.
 
-        If `timeout` is not :const:`None`, this raises the
-        :class:`celery.exceptions.TimeoutError` exception if the operation
-        takes longer than `timeout` seconds.
-
+        Raises:
+            celery.exceptions.TimeoutError:
+                If `timeout` is not :const:`None` and the operation
+                takes longer than `timeout` seconds.
         """
         self._ensure_not_eager()
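
The timeout behavior from the caller's side, sketched with a
hypothetical ``add`` task:

.. code-block:: python

    from celery.exceptions import TimeoutError

    res = add.delay(2, 2)
    try:
        print(res.get(timeout=5.0))
    except TimeoutError:
        print('no result within 5 seconds')
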
 

+ 9 - 14
celery/backends/cache.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.backends.cache``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Memcached and in-memory cache result backend.
-
-"""
+"""Memcached and in-memory cache result backend."""
 from __future__ import absolute_import, unicode_literals
 
 import sys
@@ -85,13 +79,16 @@ class DummyClient(object):
         return self.cache.incr(key, delta)
 
 
-backends = {'memcache': get_best_memcache,
-            'memcached': get_best_memcache,
-            'pylibmc': get_best_memcache,
-            'memory': lambda: (DummyClient, ensure_bytes)}
+backends = {
+    'memcache': get_best_memcache,
+    'memcached': get_best_memcache,
+    'pylibmc': get_best_memcache,
+    'memory': lambda: (DummyClient, ensure_bytes),
+}
 
 
 class CacheBackend(KeyValueStoreBackend):
+
     servers = None
     supports_autoexpire = True
     supports_native_join = True
@@ -132,8 +129,7 @@ class CacheBackend(KeyValueStoreBackend):
     def _apply_chord_incr(self, header, partial_args, group_id, body, **opts):
         self.client.set(self.get_key_for_chord(group_id), 0, time=86400)
         return super(CacheBackend, self)._apply_chord_incr(
-            header, partial_args, group_id, body, **opts
-        )
+            header, partial_args, group_id, body, **opts)
 
     def incr(self, key):
         return self.client.incr(key)
@@ -155,7 +151,6 @@ class CacheBackend(KeyValueStoreBackend):
         """Return the backend as an URI.
 
         This properly handles the case of multiple servers.
-
         """
         servers = ';'.join(self.servers)
         return '{0}://{1}/'.format(self.backend, servers)

+ 64 - 81
celery/backends/cassandra.py

@@ -1,11 +1,5 @@
 # -* coding: utf-8 -*-
-"""
-    ``celery.backends.cassandra``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Apache Cassandra result store backend using DataStax driver
-
-"""
+"""Apache Cassandra result store backend using the DataStax driver."""
 from __future__ import absolute_import, unicode_literals
 
 import sys
@@ -74,9 +68,10 @@ else:
 class CassandraBackend(BaseBackend):
     """Cassandra backend utilizing DataStax driver
 
-    :raises celery.exceptions.ImproperlyConfigured: if
-        module :pypi:`cassandra-driver` is not available.
-
+    Raises:
+        celery.exceptions.ImproperlyConfigured:
+            if module :pypi:`cassandra-driver` is not available,
+            or if the :setting:`cassandra_servers` setting is not set.
     """
 
     #: List of Cassandra servers with format: ``hostname``.
@@ -86,46 +81,34 @@ class CassandraBackend(BaseBackend):
 
     def __init__(self, servers=None, keyspace=None, table=None, entry_ttl=None,
                  port=9042, **kwargs):
-        """Initialize Cassandra backend.
-
-        Raises :class:`celery.exceptions.ImproperlyConfigured` if
-        the :setting:`cassandra_servers` setting is not set.
-
-        """
         super(CassandraBackend, self).__init__(**kwargs)
 
         if not cassandra:
             raise ImproperlyConfigured(E_NO_CASSANDRA)
 
         conf = self.app.conf
-        self.servers = (servers or
-                        conf.get('cassandra_servers', None))
-        self.port = (port or
-                     conf.get('cassandra_port', None))
-        self.keyspace = (keyspace or
-                         conf.get('cassandra_keyspace', None))
-        self.table = (table or
-                      conf.get('cassandra_table', None))
+        self.servers = servers or conf.get('cassandra_servers', None)
+        self.port = port or conf.get('cassandra_port', None)
+        self.keyspace = keyspace or conf.get('cassandra_keyspace', None)
+        self.table = table or conf.get('cassandra_table', None)
 
         if not self.servers or not self.keyspace or not self.table:
             raise ImproperlyConfigured('Cassandra backend not configured.')
 
-        expires = (entry_ttl or conf.get('cassandra_entry_ttl', None))
+        expires = entry_ttl or conf.get('cassandra_entry_ttl', None)
 
-        self.cqlexpires = (Q_EXPIRES.format(expires)
-                           if expires is not None else '')
+        self.cqlexpires = (
+            Q_EXPIRES.format(expires) if expires is not None else '')
 
         read_cons = conf.get('cassandra_read_consistency') or 'LOCAL_QUORUM'
         write_cons = conf.get('cassandra_write_consistency') or 'LOCAL_QUORUM'
 
         self.read_consistency = getattr(
             cassandra.ConsistencyLevel, read_cons,
-            cassandra.ConsistencyLevel.LOCAL_QUORUM,
-        )
+            cassandra.ConsistencyLevel.LOCAL_QUORUM)
         self.write_consistency = getattr(
             cassandra.ConsistencyLevel, write_cons,
-            cassandra.ConsistencyLevel.LOCAL_QUORUM,
-        )
+            cassandra.ConsistencyLevel.LOCAL_QUORUM)
 
         self.auth_provider = None
         auth_provider = conf.get('cassandra_auth_provider', None)
@@ -145,64 +128,64 @@ class CassandraBackend(BaseBackend):
     def process_cleanup(self):
         if self._connection is not None:
             self._connection.shutdown()  # also shuts down _session
-
         self._connection = None
         self._session = None
 
     def _get_connection(self, write=False):
         """Prepare the connection for action
 
-        :param write: bool - are we a writer?
-
+        Arguments:
+            write (bool): Are we a writer?
         """
-        if self._connection is None:
-            try:
-                self._connection = cassandra.cluster.Cluster(
-                    self.servers, port=self.port,
-                    auth_provider=self.auth_provider)
-                self._session = self._connection.connect(self.keyspace)
-
-                # We are forced to do concatenation below, as formatting would
-                # blow up on superficial %s that will be processed by Cassandra
-                self._write_stmt = cassandra.query.SimpleStatement(
-                    Q_INSERT_RESULT.format(
-                        table=self.table, expires=self.cqlexpires),
-                )
-                self._write_stmt.consistency_level = self.write_consistency
-
-                self._read_stmt = cassandra.query.SimpleStatement(
-                    Q_SELECT_RESULT.format(table=self.table),
+        if self._connection is not None:
+            return
+        try:
+            self._connection = cassandra.cluster.Cluster(
+                self.servers, port=self.port,
+                auth_provider=self.auth_provider)
+            self._session = self._connection.connect(self.keyspace)
+
+            # We are forced to do concatenation below, as formatting would
+            # blow up on superficial %s that will be processed by Cassandra
+            self._write_stmt = cassandra.query.SimpleStatement(
+                Q_INSERT_RESULT.format(
+                    table=self.table, expires=self.cqlexpires),
+            )
+            self._write_stmt.consistency_level = self.write_consistency
+
+            self._read_stmt = cassandra.query.SimpleStatement(
+                Q_SELECT_RESULT.format(table=self.table),
+            )
+            self._read_stmt.consistency_level = self.read_consistency
+
+            if write:
+                # Only possible writers "workers" are allowed to issue
+                # CREATE TABLE. This is to prevent conflicting situations
+                # where both task-creator and task-executor would issue it
+                # at the same time.
+
+                # Anyway; if you're doing anything critical, you should
+                # have created this table in advance, in which case
+                # this query will be a no-op (AlreadyExists)
+                self._make_stmt = cassandra.query.SimpleStatement(
+                    Q_CREATE_RESULT_TABLE.format(table=self.table),
                 )
-                self._read_stmt.consistency_level = self.read_consistency
-
-                if write:
-                    # Only possible writers "workers" are allowed to issue
-                    # CREATE TABLE. This is to prevent conflicting situations
-                    # where both task-creator and task-executor would issue it
-                    # at the same time.
-
-                    # Anyway; if you're doing anything critical, you should
-                    # have created this table in advance, in which case
-                    # this query will be a no-op (AlreadyExists)
-                    self._make_stmt = cassandra.query.SimpleStatement(
-                        Q_CREATE_RESULT_TABLE.format(table=self.table),
-                    )
-                    self._make_stmt.consistency_level = self.write_consistency
-
-                    try:
-                        self._session.execute(self._make_stmt)
-                    except cassandra.AlreadyExists:
-                        pass
-
-            except cassandra.OperationTimedOut:
-                # a heavily loaded or gone Cassandra cluster failed to respond.
-                # leave this class in a consistent state
-                if self._connection is not None:
-                    self._connection.shutdown()     # also shuts down _session
-
-                self._connection = None
-                self._session = None
-                raise   # we did fail after all - reraise
+                self._make_stmt.consistency_level = self.write_consistency
+
+                try:
+                    self._session.execute(self._make_stmt)
+                except cassandra.AlreadyExists:
+                    pass
+
+        except cassandra.OperationTimedOut:
+            # a heavily loaded or gone Cassandra cluster failed to respond.
+            # leave this class in a consistent state
+            if self._connection is not None:
+                self._connection.shutdown()     # also shuts down _session
+
+            self._connection = None
+            self._session = None
+            raise   # we did fail after all - reraise
 
     def _store_result(self, task_id, result, state,
                       traceback=None, request=None, **kwargs):
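
A configuration sketch matching the settings read above (host name and
keyspace/table names are placeholders):

.. code-block:: python

    app.conf.update(
        result_backend='cassandra://',
        cassandra_servers=['cassandra1.example.com'],
        cassandra_port=9042,
        cassandra_keyspace='celery',
        cassandra_table='tasks',
        cassandra_entry_ttl=86400,  # seconds, interpolated into Q_EXPIRES
        cassandra_read_consistency='LOCAL_QUORUM',
        cassandra_write_consistency='LOCAL_QUORUM',
    )
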

+ 3 - 9
celery/backends/consul.py

@@ -1,13 +1,8 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.backends.consul
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Consul result store backend.
-
-    - :class:`ConsulBackend` implements KeyValueStoreBackend to store results
-      in the key-value store of Consul.
+"""Consul result store backend.
 
+- :class:`ConsulBackend` implements KeyValueStoreBackend to store results
+  in the key-value store of Consul.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -84,7 +79,6 @@ class ConsulBackend(KeyValueStoreBackend):
 
         If the session expires it will remove the key so that results
         can auto expire from the K/V store
-
         """
         session_name = key
         if PY3:

+ 5 - 10
celery/backends/couchbase.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.backends.couchbase``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Couchbase result store backend.
-
-"""
+"""Couchbase result store backend."""
 from __future__ import absolute_import, unicode_literals
 
 import logging
@@ -30,10 +24,11 @@ __all__ = ['CouchbaseBackend']
 class CouchbaseBackend(KeyValueStoreBackend):
     """Couchbase backend.
 
-    :raises celery.exceptions.ImproperlyConfigured: if
-        module :pypi:`couchbase` is not available.
-
+    Raises:
+        celery.exceptions.ImproperlyConfigured:
+            if module :pypi:`couchbase` is not available.
     """
+
     bucket = 'default'
     host = 'localhost'
     port = 8091

+ 10 - 15
celery/backends/couchdb.py

@@ -1,24 +1,18 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.backends.couchdb``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    CouchDB result store backend.
-
-"""
+"""CouchDB result store backend."""
 from __future__ import absolute_import, unicode_literals
 
-try:
-    import pycouchdb
-except ImportError:
-    pycouchdb = None  # noqa
-
 from kombu.utils.url import _parse_url
 
 from celery.exceptions import ImproperlyConfigured
 
 from .base import KeyValueStoreBackend
 
+try:
+    import pycouchdb
+except ImportError:
+    pycouchdb = None  # noqa
+
 __all__ = ['CouchBackend']
 
 ERR_LIB_MISSING = """\
@@ -29,10 +23,11 @@ You need to install the pycouchdb library to use the CouchDB result backend\
 class CouchBackend(KeyValueStoreBackend):
     """CouchDB backend.
 
-    :raises celery.exceptions.ImproperlyConfigured: if
-        module :pypi:`pycouchdb` is not available.
-
+    Raises:
+        celery.exceptions.ImproperlyConfigured:
+            if module :pypi:`pycouchdb` is not available.
     """
+
     container = 'default'
     scheme = 'http'
     host = 'localhost'

+ 5 - 17
celery/backends/database/__init__.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.backends.database
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    SQLAlchemy result store backend.
-
-"""
+"""SQLAlchemy result store backend."""
 from __future__ import absolute_import, unicode_literals
 
 import logging
@@ -61,8 +55,7 @@ def retry(fun):
                 logger.warning(
                     'Failed operation %s. Retrying %s more times.',
                     fun.__name__, max_retries - retries - 1,
-                    exc_info=True,
-                )
+                    exc_info=True)
                 if retries + 1 >= max_retries:
                     raise
 
@@ -79,10 +72,7 @@ class DatabaseBackend(BaseBackend):
         # The `url` argument was added later and is used by
         # the app to set backend by url (celery.backends.get_backend_by_url)
         super(DatabaseBackend, self).__init__(
-            expires_type=maybe_timedelta,
-            url=url,
-            **kwargs
-        )
+            expires_type=maybe_timedelta, url=url, **kwargs)
         conf = self.app.conf
         self.url = url or dburi or conf.sqlalchemy_dburi
         self.engine_options = dict(
@@ -90,8 +80,7 @@ class DatabaseBackend(BaseBackend):
             **conf.sqlalchemy_engine_options or {})
         self.short_lived_sessions = kwargs.get(
             'short_lived_sessions',
-            conf.sqlalchemy_short_lived_sessions,
-        )
+            conf.sqlalchemy_short_lived_sessions)
 
         tablenames = conf.sqlalchemy_table_names or {}
         Task.__table__.name = tablenames.get('task', 'celery_taskmeta')
@@ -106,8 +95,7 @@ class DatabaseBackend(BaseBackend):
         return session_manager.session_factory(
             dburi=self.url,
             short_lived_sessions=self.short_lived_sessions,
-            **self.engine_options
-        )
+            **self.engine_options)
 
     @retry
     def _store_result(self, task_id, result, state,

+ 16 - 17
celery/backends/database/models.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.backends.database.models
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Database tables for the SQLAlchemy result store backend.
-
-"""
+"""Database models used by the SQLAlchemy result store backend."""
 from __future__ import absolute_import, unicode_literals
 
 from datetime import datetime
@@ -24,12 +18,12 @@ __all__ = ['Task', 'TaskSet']
 @python_2_unicode_compatible
 class Task(ResultModelBase):
     """Task result/status."""
+
     __tablename__ = 'celery_taskmeta'
     __table_args__ = {'sqlite_autoincrement': True}
 
     id = sa.Column(sa.Integer, sa.Sequence('task_id_sequence'),
-                   primary_key=True,
-                   autoincrement=True)
+                   primary_key=True, autoincrement=True)
     task_id = sa.Column(sa.String(155), unique=True)
     status = sa.Column(sa.String(50), default=states.PENDING)
     result = sa.Column(PickleType, nullable=True)
@@ -41,11 +35,13 @@ class Task(ResultModelBase):
         self.task_id = task_id
 
     def to_dict(self):
-        return {'task_id': self.task_id,
-                'status': self.status,
-                'result': self.result,
-                'traceback': self.traceback,
-                'date_done': self.date_done}
+        return {
+            'task_id': self.task_id,
+            'status': self.status,
+            'result': self.result,
+            'traceback': self.traceback,
+            'date_done': self.date_done,
+        }
 
     def __repr__(self):
         return '<Task {0.task_id} state: {0.status}>'.format(self)
@@ -54,6 +50,7 @@ class Task(ResultModelBase):
 @python_2_unicode_compatible
 class TaskSet(ResultModelBase):
     """TaskSet result"""
+
     __tablename__ = 'celery_tasksetmeta'
     __table_args__ = {'sqlite_autoincrement': True}
 
@@ -69,9 +66,11 @@ class TaskSet(ResultModelBase):
         self.result = result
 
     def to_dict(self):
-        return {'taskset_id': self.taskset_id,
-                'result': self.result,
-                'date_done': self.date_done}
+        return {
+            'taskset_id': self.taskset_id,
+            'result': self.result,
+            'date_done': self.date_done,
+        }
 
     def __repr__(self):
         return '<TaskSet: {0.taskset_id}>'.format(self)

+ 1 - 7
celery/backends/database/session.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.backends.database.session
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    SQLAlchemy sessions.
-
-"""
+"""SQLAlchemy session."""
 from __future__ import absolute_import, unicode_literals
 
 from sqlalchemy import create_engine

+ 4 - 10
celery/backends/elasticsearch.py

@@ -1,11 +1,5 @@
 # -* coding: utf-8 -*-
-"""
-    ``celery.backends.elasticsearch``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Elasticsearch result store backend.
-
-"""
+"""Elasticsearch result store backend."""
 from __future__ import absolute_import, unicode_literals
 
 from datetime import datetime
@@ -32,9 +26,9 @@ result backend.\
 class ElasticsearchBackend(KeyValueStoreBackend):
     """Elasticsearch Backend.
 
-    :raises celery.exceptions.ImproperlyConfigured: if
-        module :pypi:`elasticsearch` is not available.
-
+    Raises:
+        celery.exceptions.ImproperlyConfigured:
+            if module :pypi:`elasticsearch` is not available.
     """
 
     index = 'celery'

+ 7 - 15
celery/backends/filesystem.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.backends.filesystem``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    File-system result store backend.
-
-"""
+"""File-system result store backend."""
 from __future__ import absolute_import, unicode_literals
 
 from kombu.utils.encoding import ensure_bytes
@@ -35,14 +29,12 @@ the correct permissions.\
 class FilesystemBackend(KeyValueStoreBackend):
     """File-system result backend.
 
-    Keyword arguments (in addition to those of KeyValueStoreBackend):
-
-    :param url:  URL to the directory we should use
-    :param open: open function to use when opening files
-    :param unlink: unlink function to use when deleting files
-    :param sep: directory separator (to join the directory with the key)
-    :param encoding: encoding used on the file-system
-
+    Arguments:
+        url (str): URL to the directory we should use.
+        open (Callable): open function to use when opening files.
+        unlink (Callable): unlink function to use when deleting files.
+        sep (str): directory separator (to join the directory with the key).
+        encoding (str): encoding used on the file-system.
     """
 
     def __init__(self, url=None, open=open, unlink=os.unlink, sep=os.sep,
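
A configuration sketch; the directory in the URL is assumed to exist
and to be writable by both clients and workers:

.. code-block:: python

    app.conf.result_backend = 'file:///var/celery/results'
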

+ 30 - 40
celery/backends/mongodb.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.backends.mongodb``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    MongoDB result store backend.
-
-"""
+"""MongoDB result store backend."""
 from __future__ import absolute_import, unicode_literals
 
 from datetime import datetime, timedelta
@@ -42,9 +36,9 @@ __all__ = ['MongoBackend']
 class MongoBackend(BaseBackend):
     """MongoDB result backend.
 
-    :raises celery.exceptions.ImproperlyConfigured: if
-        module :pypi:`pymongo` is not available.
-
+    Raises:
+        celery.exceptions.ImproperlyConfigured:
+            if module :pypi:`pymongo` is not available.
     """
 
     mongo_host = None
@@ -169,15 +163,16 @@ class MongoBackend(BaseBackend):
     def _store_result(self, task_id, result, state,
                       traceback=None, request=None, **kwargs):
         """Store return value and state of an executed task."""
-
-        meta = {'_id': task_id,
-                'status': state,
-                'result': self.encode(result),
-                'date_done': datetime.utcnow(),
-                'traceback': self.encode(traceback),
-                'children': self.encode(
-                    self.current_task_children(request),
-                )}
+        meta = {
+            '_id': task_id,
+            'status': state,
+            'result': self.encode(result),
+            'date_done': datetime.utcnow(),
+            'traceback': self.encode(traceback),
+            'children': self.encode(
+                self.current_task_children(request),
+            ),
+        }
 
         try:
             self.collection.save(meta)
@@ -202,27 +197,24 @@ class MongoBackend(BaseBackend):
 
     def _save_group(self, group_id, result):
         """Save the group result."""
-
-        task_ids = [i.id for i in result]
-
-        meta = {'_id': group_id,
-                'result': self.encode(task_ids),
-                'date_done': datetime.utcnow()}
-        self.group_collection.save(meta)
-
+        self.group_collection.save({
+            '_id': group_id,
+            'result': self.encode([i.id for i in result]),
+            'date_done': datetime.utcnow(),
+        })
         return result
 
     def _restore_group(self, group_id):
         """Get the result for a group by id."""
         obj = self.group_collection.find_one({'_id': group_id})
         if obj:
-            tasks = [self.app.AsyncResult(task)
-                     for task in self.decode(obj['result'])]
-
             return {
                 'task_id': obj['_id'],
-                'result': tasks,
                 'date_done': obj['date_done'],
+                'result': [
+                    self.app.AsyncResult(task)
+                    for task in self.decode(obj['result'])
+                ],
             }
 
     def _delete_group(self, group_id):
@@ -232,9 +224,9 @@ class MongoBackend(BaseBackend):
     def _forget(self, task_id):
         """Remove result from MongoDB.
 
-        :raises celery.exceptions.OperationsError:
-            if the task_id could not be removed.
-
+        Raises:
+            pymongo.errors.OperationFailure:
+                if the task_id could not be removed.
         """
         # By using safe=True, this will wait until it receives a response from
         # the server.  Likewise, it will raise an OperationFailure if the
@@ -252,15 +244,13 @@ class MongoBackend(BaseBackend):
 
     def __reduce__(self, args=(), kwargs={}):
         return super(MongoBackend, self).__reduce__(
-            args, dict(kwargs, expires=self.expires, url=self.url),
-        )
+            args, dict(kwargs, expires=self.expires, url=self.url))
 
     def _get_database(self):
         conn = self._get_connection()
         db = conn[self.database_name]
         if self.user and self.password:
-            if not db.authenticate(self.user,
-                                   self.password):
+            if not db.authenticate(self.user, self.password):
                 raise ImproperlyConfigured(
                     'Invalid MongoDB username or password.')
         return db
@@ -298,8 +288,8 @@ class MongoBackend(BaseBackend):
     def as_uri(self, include_password=False):
         """Return the backend as an URI.
 
-        :keyword include_password: Censor passwords.
-
+        Arguments:
+            include_password (bool): Password will be censored if disabled.
         """
         if not self.url:
             return 'mongodb://'

+ 4 - 13
celery/backends/redis.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.backends.redis``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Redis result store backend.
-
-"""
+"""Redis result store backend."""
 from __future__ import absolute_import, unicode_literals
 
 from functools import partial
@@ -120,14 +114,12 @@ class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
             raise ImproperlyConfigured(REDIS_MISSING)
 
         if host and '://' in host:
-            url = host
-            host = None
+            url, host = host, None
 
         self.max_connections = (
             max_connections or
             _get('redis_max_connections') or
-            self.max_connections
-        )
+            self.max_connections)
         self._ConnectionPool = connection_pool
 
         self.connparams = {
@@ -196,8 +188,7 @@ class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin):
         return retry_over_time(
             fun, self.connection_errors, args, {},
             partial(self.on_connection_error, max_retries),
-            **retry_policy
-        )
+            **retry_policy)
 
     def on_connection_error(self, max_retries, exc, intervals, retries):
         tts = next(intervals)

+ 5 - 10
celery/backends/riak.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.backends.riak``
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Riak result store backend.
-
-"""
+"""Riak result store backend."""
 from __future__ import absolute_import, unicode_literals
 
 import sys
@@ -52,10 +46,11 @@ def is_ascii(s):
 class RiakBackend(KeyValueStoreBackend):
     """Riak result backend.
 
-    :raises celery.exceptions.ImproperlyConfigured: if
-        module :pypi:`riak` is not available.
-
+    Raises:
+        celery.exceptions.ImproperlyConfigured:
+            if module :pypi:`riak` is not available.
     """
+
     # TODO: allow using other protocols than protobuf ?
     #: default protocol used to connect to Riak, might be `http` or `pbc`
     protocol = 'pbc'

+ 10 - 12
celery/backends/rpc.py

@@ -1,10 +1,7 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.backends.rpc``
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    RPC-style result backend, using reply-to and one queue per client.
+"""The ``RPC`` result backend for AMQP brokers.
 
+RPC-style result backend, using reply-to and one queue per client.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -247,7 +244,8 @@ class BaseRPCBackend(base.Backend, AsyncBackendMixin):
             'delete_group is not supported by this backend.')
 
     def __reduce__(self, args=(), kwargs={}):
-        kwargs.update(
+        return super(BaseRPCBackend, self).__reduce__(args, dict(
+            kwargs,
             connection=self._connection,
             exchange=self.exchange.name,
             exchange_type=self.exchange.type,
@@ -255,8 +253,7 @@ class BaseRPCBackend(base.Backend, AsyncBackendMixin):
             serializer=self.serializer,
             auto_delete=self.auto_delete,
             expires=self.expires,
-        )
-        return super(BaseRPCBackend, self).__reduce__(args, kwargs)
+        ))
 
 
 class RPCBackend(BaseRPCBackend):
@@ -286,8 +283,7 @@ class RPCBackend(BaseRPCBackend):
             request = request or current_task.request
         except AttributeError:
             raise RuntimeError(
-                'RPC backend missing task request for {0!r}'.format(task_id),
-            )
+                'RPC backend missing task request for {0!r}'.format(task_id))
         return request.reply_to, request.correlation_id or task_id
 
     def on_reply_declare(self, task_id):
@@ -301,8 +297,10 @@ class RPCBackend(BaseRPCBackend):
 
     @property
     def binding(self):
-        return self.Queue(self.oid, self.exchange, self.oid,
-                          durable=False, auto_delete=True)
+        return self.Queue(
+            self.oid, self.exchange, self.oid,
+            durable=False, auto_delete=True)
 
     @cached_property
     def oid(self):

+ 29 - 31
celery/beat.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.beat
-    ~~~~~~~~~~~
-
-    The periodic task scheduler.
-
-"""
+"""The periodic task scheduler."""
 from __future__ import absolute_import, unicode_literals
 
 import errno
@@ -60,15 +54,15 @@ class SchedulingError(Exception):
 class ScheduleEntry(object):
     """An entry in the scheduler.
 
-    :keyword name: see :attr:`name`.
-    :keyword schedule: see :attr:`schedule`.
-    :keyword args: see :attr:`args`.
-    :keyword kwargs: see :attr:`kwargs`.
-    :keyword options: see :attr:`options`.
-    :keyword last_run_at: see :attr:`last_run_at`.
-    :keyword total_run_count: see :attr:`total_run_count`.
-    :keyword relative: Is the time relative to when the server starts?
-
+    Arguments:
+        name (str): see :attr:`name`.
+        schedule (~celery.schedules.schedule): see :attr:`schedule`.
+        args (Tuple): see :attr:`args`.
+        kwargs (Dict): see :attr:`kwargs`.
+        options (Dict): see :attr:`options`.
+        last_run_at (~datetime.datetime): see :attr:`last_run_at`.
+        total_run_count (int): see :attr:`total_run_count`.
+        relative (bool): Is the time relative to when the server starts?
     """
 
     #: The task name
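
A sketch of a schedule entry populating the fields above, assuming a
hypothetical ``proj.tasks.add`` task:

.. code-block:: python

    app.conf.beat_schedule = {
        'add-every-30-seconds': {
            'task': 'proj.tasks.add',
            'schedule': 30.0,  # seconds
            'args': (16, 16),
            'options': {'queue': 'celery'},
        },
    }
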
@@ -127,13 +121,14 @@ class ScheduleEntry(object):
     def update(self, other):
         """Update values from another entry.
 
-        Does only update "editable" fields (task, schedule, args, kwargs,
-        options).
-
+        Will only update "editable" fields:
+            ``task``, ``schedule``, ``args``, ``kwargs``, ``options``.
         """
-        self.__dict__.update({'task': other.task, 'schedule': other.schedule,
-                              'args': other.args, 'kwargs': other.kwargs,
-                              'options': other.options})
+        self.__dict__.update({
+            'task': other.task, 'schedule': other.schedule,
+            'args': other.args, 'kwargs': other.kwargs,
+            'options': other.options,
+        })
 
     def is_due(self):
         """See :meth:`~celery.schedule.schedule.is_due`."""
@@ -143,9 +138,10 @@ class ScheduleEntry(object):
         return iter(items(vars(self)))
 
     def __repr__(self):
-        return '<Entry: {0.name} {call} {0.schedule}'.format(
+        return '<{name} {0.name} {call} {0.schedule}'.format(
             self,
             call=reprcall(self.task, self.args or (), self.kwargs or {}),
+            name=type(self).__name__,
         )
 
     def __lt__(self, other):
@@ -168,11 +164,12 @@ class Scheduler(object):
     ``lazy`` argument set.  It is important for subclasses to
     be idempotent when this argument is set.
 
-    :keyword schedule: see :attr:`schedule`.
-    :keyword max_interval: see :attr:`max_interval`.
-    :keyword lazy: Do not set up the schedule.
-
+    Arguments:
+        schedule (~celery.schedules.schedule): see :attr:`schedule`.
+        max_interval (int): see :attr:`max_interval`.
+        lazy (bool): Do not set up the schedule.
     """
+
     Entry = ScheduleEntry
 
     #: The schedule dict/shelve.
@@ -243,7 +240,8 @@ class Scheduler(object):
 
         Executes one due task per call.
 
-        Returns preferred delay in seconds for next call.
+        Returns:
+            float: preferred delay in seconds for next call.
         """
 
         def _when(entry, next_time_to_run):
@@ -609,9 +607,9 @@ else:
 def EmbeddedService(app, max_interval=None, **kwargs):
     """Return embedded clock service.
 
-    :keyword thread: Run threaded instead of as a separate process.
-        Uses :mod:`multiprocessing` by default, if available.
-
+    Arguments:
+        thread (bool): Run threaded instead of as a separate process.
+            Uses :mod:`multiprocessing` by default, if available.
     """
     if kwargs.pop('thread', False) or _Process is None:
         # Need short max interval to be able to stop thread
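
A usage sketch, assuming the returned threaded/process service exposes
the usual ``start()``/``stop()`` interface:

.. code-block:: python

    service = EmbeddedService(app, thread=True)
    service.start()
    # ... later, at shutdown:
    service.stop()
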

+ 38 - 50
celery/bin/amqp.py

@@ -1,9 +1,7 @@
 # -*- coding: utf-8 -*-
-"""
-The :program:`celery amqp` command.
+"""The :program:`celery amqp` command.
 
 .. program:: celery amqp
-
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
@@ -47,20 +45,19 @@ class Spec(object):
     Used to convert arguments to Python values and display various help
     and tool-tips.
 
-    :param args: see :attr:`args`.
-    :keyword returns: see :attr:`returns`.
-
-    .. attribute args::
-
-        List of arguments this command takes. Should
-        contain `(argument_name, argument_type)` tuples.
+    Arguments:
+        args (Sequence): see :attr:`args`.
+        returns (str): see :attr:`returns`.
+    """
 
-    .. attribute returns:
+    #: List of arguments this command takes.
+    #: Should contain ``(argument_name, argument_type)`` tuples.
+    args = None
 
-        Helpful human string representation of what this command returns.
-        May be :const:`None`, to signify the return type is unknown.
+    #: Helpful, human-readable representation of what this command returns.
+    #: May be :const:`None`, to signify the return type is unknown.
+    returns = None
 
-    """
     def __init__(self, *args, **kwargs):
         self.args = args
         self.returns = kwargs.get('returns')
@@ -76,12 +73,11 @@ class Spec(object):
     def str_args_to_python(self, arglist):
         """Process list of string arguments to values according to spec.
 
-        e.g:
+        e.g::
 
             >>> spec = Spec([('queue', str), ('if_unused', bool)])
             >>> spec.str_args_to_python('pobox', 'true')
             ('pobox', True)
-
         """
         return tuple(
             self.coerce(index, value) for index, value in enumerate(arglist))
@@ -119,21 +115,11 @@ def format_declare_queue(ret):
 class AMQShell(cmd.Cmd):
     """AMQP API Shell.
 
-    :keyword connect: Function used to connect to the server, must return
-        connection object.
-
-    :keyword silent: If :const:`True`, the commands won't have annoying
-                     output not relevant when running in non-shell mode.
-
-
-    .. attribute: builtins
-
-        Mapping of built-in command names -> method names
-
-    .. attribute:: amqp
-
-        Mapping of AMQP API commands and their :class:`Spec`.
-
+    Arguments:
+        connect (Callable): Function used to connect to the server.
+            Must return :class:`kombu.Connection` object.
+        silent (bool): If enabled, the commands won't print
+            output that isn't relevant when running in non-shell mode.
     """
     conn = None
     chan = None
@@ -143,10 +129,14 @@ class AMQShell(cmd.Cmd):
     counter = 1
     inc_counter = count(2)
 
-    builtins = {'EOF': 'do_exit',
-                'exit': 'do_exit',
-                'help': 'do_help'}
+    #: Map of built-in command names -> method names
+    builtins = {
+        'EOF': 'do_exit',
+        'exit': 'do_exit',
+        'help': 'do_help',
+    }
 
+    #: Map of AMQP API commands and their :class:`Spec`.
     amqp = {
         'exchange.declare': Spec(('exchange', str),
                                  ('type', str),
@@ -212,8 +202,8 @@ class AMQShell(cmd.Cmd):
         to Python values and find the corresponding method on the AMQP channel
         object.
 
-        :returns: tuple of `(method, processed_args)`.
-
+        Returns:
+            Tuple: of ``(method, processed_args)``.
         """
         spec = self.amqp[cmd]
         args = spec.str_args_to_python(arglist)
@@ -260,7 +250,6 @@ class AMQShell(cmd.Cmd):
         """Dispatch and execute the command.
 
         Look-up order is: :attr:`builtins` -> :attr:`amqp`.
-
         """
         if isinstance(arglist, string_t):
             arglist = shlex.split(safe_str(arglist))
@@ -272,9 +261,9 @@ class AMQShell(cmd.Cmd):
     def parseline(self, parts):
         """Parse input line.
 
-        :returns: tuple of three items:
-            `(command_name, arglist, original_line)`
-
+        Returns:
+            Tuple: of three items:
+                `(command_name, arglist, original_line)`
         """
         if parts:
             return parts[0], parts[1:], ' '.join(parts)
@@ -319,6 +308,7 @@ class AMQShell(cmd.Cmd):
 
 class AMQPAdmin(object):
     """The celery :program:`celery amqp` utility."""
+
     Shell = AMQShell
 
     def __init__(self, *args, **kwargs):
@@ -358,18 +348,16 @@ class amqp(Command):
     store declarations in memory).
 
     Examples:
+        .. code-block:: console
 
-    .. code-block:: console
-
-        $ celery amqp
-            start shell mode
-        $ celery amqp help
-            show list of commands
-
-        $ celery amqp exchange.delete name
-        $ celery amqp queue.delete queue
-        $ celery amqp queue.delete queue yes yes
+            $ # start shell mode
+            $ celery amqp
+            $ # show list of commands
+            $ celery amqp help
 
+            $ celery amqp exchange.delete name
+            $ celery amqp queue.delete queue
+            $ celery amqp queue.delete queue yes yes
     """
 
     def run(self, *args, **options):

+ 20 - 16
celery/bin/base.py

@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+"""Base command-line interface."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
@@ -113,10 +114,12 @@ class HelpFormatter(IndentedHelpFormatter):
 class Command(object):
     """Base class for command-line applications.
 
-    :keyword app: The current app.
-    :keyword get_app: Callable returning the current app if no app provided.
-
+    Arguments:
+        app (~@Celery): The app to use.
+        get_app (Callable): Function returning the current app
+            when no app has been provided.
     """
+
     Error = Error
     UsageError = UsageError
     Parser = OptionParser
@@ -231,9 +234,9 @@ class Command(object):
     def execute_from_commandline(self, argv=None):
         """Execute application from command-line.
 
-        :keyword argv: The list of command-line arguments.
-                       Defaults to ``sys.argv``.
-
+        Arguments:
+            argv (List[str]): The list of command-line arguments.
+                Defaults to ``sys.argv``.
         """
         if argv is None:
             argv = list(sys.argv)
@@ -276,15 +279,15 @@ class Command(object):
     def ask(self, q, choices, default=None):
         """Prompt user to choose from a tuple of string values.
 
-        :param q: the question to ask (do not include questionark)
-        :param choice: tuple of possible choices, must be lowercase.
-        :param default: Default value if any.
-
         If a default is not specified the question will be repeated
         until the user gives a valid choice.
 
-        Matching is done case insensitively.
+        Matching is case insensitive.
 
+        Arguments:
+            q (str): the question to ask (do not include question mark).
+            choices (Tuple[str]): tuple of possible choices, must be lowercase.
+            default (Any): Default value if any.
         """
         schoices = choices
         if default is not None:
@@ -305,12 +308,13 @@ class Command(object):
         """Parse command-line arguments from ``argv`` and dispatch
         to :meth:`run`.
 
-        :param prog_name: The program name (``argv[0]``).
-        :param argv: Command arguments.
-
-        Exits with an error message if :attr:`supports_args` is disabled
-        and ``argv`` contains positional arguments.
+        Warning:
+            Exits with an error message if :attr:`supports_args` is disabled
+            and ``argv`` contains positional arguments.
 
+        Arguments:
+            prog_name (str): The program name (``argv[0]``).
+            argv (List[str]): Rest of command-line arguments.
         """
         options, args = self.prepare_args(
             *self.parse_options(prog_name, argv, command))
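To illustrate the ``Command`` API documented in the hunks above, a minimal hypothetical subclass (the ``hello`` name and behavior are assumptions, not part of the commit):

.. code-block:: python

    from celery.bin.base import Command

    class hello(Command):
        """Say hello on the console."""

        def run(self, *args, **kwargs):
            # ask() repeats the question until a valid choice is given.
            if self.ask('Greet loudly?', ('yes', 'no'), 'no') == 'yes':
                self.out('HELLO!')
            else:
                self.out('hello')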

+ 6 - 9
celery/bin/beat.py

@@ -1,7 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-
-The :program:`celery beat` command.
+"""The :program:`celery beat` command.
 
 .. program:: celery beat
 
@@ -65,7 +63,6 @@ The :program:`celery beat` command.
 .. cmdoption:: --executable
 
     Executable to use for the detached process.
-
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -81,12 +78,12 @@ __all__ = ['beat']
 class beat(Command):
     """Start the beat periodic task scheduler.
 
-    Examples::
-
-        celery beat -l info
-        celery beat -s /var/run/celery/beat-schedule --detach
-        celery beat -S djcelery.schedulers.DatabaseScheduler
+    Examples:
+        .. code-block:: console
 
+            $ celery beat -l info
+            $ celery beat -s /var/run/celery/beat-schedule --detach
+            $ celery beat -S djcelery.schedulers.DatabaseScheduler
     """
     doc = __doc__
     enable_config_from_cmdline = True

+ 96 - 72
celery/bin/celery.py

@@ -1,7 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-
-The :program:`celery` umbrella command.
+"""The :program:`celery` umbrella command.
 
 .. program:: celery
 
@@ -255,7 +253,6 @@ in any command that also has a `--detach` option.
 .. cmdoption:: --routing-key
 
     Destination routing key (defaults to the queue routing key).
-
 """
 from __future__ import absolute_import, unicode_literals, print_function
 
@@ -355,12 +352,15 @@ class multi(Command):
 class list_(Command):
     """Get info from broker.
 
-    Examples::
+    Note:
+       For RabbitMQ the management plugin is required.
 
-        celery list bindings
+    Example:
+        .. code-block:: console
 
-    NOTE: For RabbitMQ the management plugin is required.
+            $ celery list bindings
     """
+
     args = '[bindings]'
 
     def list_bindings(self, management):
@@ -394,12 +394,15 @@ class list_(Command):
 class call(Command):
     """Call a task by name.
 
-    Examples::
+    Examples:
+        .. code-block:: console
 
-        celery call tasks.add --args='[2, 2]'
-        celery call tasks.add --args='[2, 2]' --countdown=10
+            $ celery call tasks.add --args='[2, 2]'
+            $ celery call tasks.add --args='[2, 2]' --countdown=10
     """
+
     args = '<task_name>'
+
     option_list = Command.option_list + (
         Option('--args', '-a', help='positional arguments (json).'),
         Option('--kwargs', '-k', help='keyword arguments (json).'),
@@ -413,19 +416,18 @@ class call(Command):
         Option('--routing-key', help='custom routing key.'),
     )
 
-    def run(self, name, *_, **kw):
-        # Positional args.
-        args = kw.get('args') or ()
-        if isinstance(args, string_t):
-            args = json.loads(args)
+    def run(self, name, *_, **kwargs):
+        self._send_task(name, **kwargs)
 
-        # Keyword args.
-        kwargs = kw.get('kwargs') or {}
-        if isinstance(kwargs, string_t):
-            kwargs = json.loads(kwargs)
+    def _send_task(self, name, args=None, kwargs=None,
+                   countdown=None, serializer=None,
+                   queue=None, exchange=None, routing_key=None,
+                   eta=None, expires=None):
+        # arguments
+        args = json.loads(args) if isinstance(args, string_t) else args
+        kwargs = json.loads(kwargs) if isinstance(kwargs, string_t) else kwargs
 
         # Expires can be int/float.
-        expires = kw.get('expires') or None
         try:
             expires = float(expires)
         except (TypeError, ValueError):
@@ -435,31 +437,37 @@ class call(Command):
             except (TypeError, ValueError):
                 raise
 
-        res = self.app.send_task(name, args=args, kwargs=kwargs,
-                                 countdown=kw.get('countdown'),
-                                 serializer=kw.get('serializer'),
-                                 queue=kw.get('queue'),
-                                 exchange=kw.get('exchange'),
-                                 routing_key=kw.get('routing_key'),
-                                 eta=maybe_iso8601(kw.get('eta')),
-                                 expires=expires)
-        self.out(res.id)
+        # send the task and print the id.
+        self.out(self.app.send_task(
+            name,
+            args=args or (), kwargs=kwargs or {},
+            countdown=countdown,
+            serializer=serializer,
+            queue=queue,
+            exchange=exchange,
+            routing_key=routing_key,
+            eta=maybe_iso8601(eta),
+            expires=expires,
+        ).id)
 
 
 class purge(Command):
     """Erase all messages from all known task queues.
 
-    WARNING: There is no undo operation for this command.
-
+    Warning:
+        There's no undo operation for this command.
     """
+
     warn_prelude = (
         '{warning}: This will remove all tasks from {queues}: {names}.\n'
         '         There is no undo for this operation!\n\n'
         '(to skip this prompt use the -f option)\n'
     )
     warn_prompt = 'Are you sure you want to delete all tasks'
+
     fmt_purged = 'Purged {mnum} {messages} from {qnum} known task {queues}.'
     fmt_empty = 'No messages purged from {qnum} {queues}'
+
     option_list = Command.option_list + (
         Option('--force', '-f', action='store_true',
                help='Do not prompt for verification'),
@@ -503,14 +511,16 @@ class purge(Command):
 class result(Command):
     """Gives the return value for a given task id.
 
-    Examples::
-
-        celery result 8f511516-e2f5-4da4-9d2f-0fb83a86e500
-        celery result 8f511516-e2f5-4da4-9d2f-0fb83a86e500 -t tasks.add
-        celery result 8f511516-e2f5-4da4-9d2f-0fb83a86e500 --traceback
+    Examples:
+        .. code-block:: console
 
+            $ celery result 8f511516-e2f5-4da4-9d2f-0fb83a86e500
+            $ celery result 8f511516-e2f5-4da4-9d2f-0fb83a86e500 -t tasks.add
+            $ celery result 8f511516-e2f5-4da4-9d2f-0fb83a86e500 --traceback
     """
+
     args = '<task_id>'
+
     option_list = Command.option_list + (
         Option('--task', '-t', help='name of task (if custom backend)'),
         Option('--traceback', action='store_true',
@@ -533,9 +543,11 @@ class result(Command):
 
 
 class _RemoteControl(Command):
+
     name = None
     choices = None
     leaf = False
+
     option_list = Command.option_list + (
         Option('--timeout', '-t', type='float',
                help='Timeout in seconds (float) waiting for reply'),
@@ -635,14 +647,16 @@ class inspect(_RemoteControl):
 
     Availability: RabbitMQ (AMQP) transport.
 
-    Examples::
-
-        celery inspect active --timeout=5
-        celery inspect scheduled -d worker1@example.com
-        celery inspect revoked -d w1@e.com,w2@e.com
+    Examples:
+        .. code-block:: console
 
+            $ celery inspect active --timeout=5
+            $ celery inspect scheduled -d worker1@example.com
+            $ celery inspect revoked -d w1@e.com,w2@e.com
     """
+
     name = 'inspect'
+
     choices = {
         'active': (1.0, 'dump active tasks (being processed)'),
         'active_queues': (1.0, 'dump queues being consumed from'),
@@ -680,19 +694,21 @@ class control(_RemoteControl):
 
     Availability: RabbitMQ (AMQP) transport.
 
-    Examples::
+    Examples:
+        .. code-block:: console
 
-        celery control enable_events --timeout=5
-        celery control -d worker1@example.com enable_events
-        celery control -d w1.e.com,w2.e.com enable_events
+            $ celery control enable_events --timeout=5
+            $ celery control -d worker1@example.com enable_events
+            $ celery control -d w1.e.com,w2.e.com enable_events
 
-        celery control -d w1.e.com add_consumer queue_name
-        celery control -d w1.e.com cancel_consumer queue_name
-
-        celery control -d w1.e.com add_consumer queue exchange direct rkey
+            $ celery control -d w1.e.com add_consumer queue_name
+            $ celery control -d w1.e.com cancel_consumer queue_name
 
+            $ celery control add_consumer queue exchange direct rkey
     """
+
     name = 'control'
+
     choices = {
         'enable_events': (1.0, 'tell worker(s) to enable events'),
         'disable_events': (1.0, 'tell worker(s) to disable events'),
@@ -739,6 +755,7 @@ class control(_RemoteControl):
 
 class status(Command):
     """Show list of workers that are online."""
+
     option_list = inspect.option_list
 
     def run(self, *args, **kwargs):
@@ -761,16 +778,18 @@ class status(Command):
 class migrate(Command):
     """Migrate tasks from one broker to another.
 
-    Example:
+    Warning:
+        This command is experimental, make sure you have a backup of
+        the tasks before you continue.
 
-    .. code-block:: console
-
-        $ celery migrate amqp://A.example.com amqp://guest@B.example.com//
+    Example:
+        .. code-block:: console
 
-    NOTE: This command is experimental, make sure you have
-          a backup of the tasks before you continue.
+            $ celery migrate amqp://A.example.com amqp://guest@B.example.com//
     """
+
     args = '<source_url> <dest_url>'
+
     option_list = Command.option_list + (
         Option('--limit', '-n', type='int',
                help='Number of tasks to consume (int)'),
@@ -785,6 +804,7 @@ class migrate(Command):
         Option('--forever', '-F', action='store_true',
                help='Continually migrate tasks until killed.'),
     )
+
     progress_fmt = MIGRATE_PROGRESS_FMT
 
     def on_migrate_task(self, state, body, message):
@@ -805,12 +825,12 @@ class shell(Command):  # pragma: no cover
 
     The following symbols will be added to the main globals:
 
-        - celery:  the current application.
-        - chord, group, chain, chunks,
-          xmap, xstarmap subtask, Task
+        - ``celery``:  the current application.
+        - ``chord``, ``group``, ``chain``, ``chunks``,
+          ``xmap``, ``xstarmap`` ``subtask``, ``Task``
         - all registered tasks.
-
     """
+
     option_list = Command.option_list + (
         Option('--ipython', '-I',
                action='store_true', dest='force_ipython',
@@ -839,17 +859,19 @@ class shell(Command):  # pragma: no cover
         import celery
         import celery.task.base
         self.app.loader.import_default_modules()
-        self.locals = {'app': self.app,
-                       'celery': self.app,
-                       'Task': celery.Task,
-                       'chord': celery.chord,
-                       'group': celery.group,
-                       'chain': celery.chain,
-                       'chunks': celery.chunks,
-                       'xmap': celery.xmap,
-                       'xstarmap': celery.xstarmap,
-                       'subtask': celery.subtask,
-                       'signature': celery.signature}
+        self.locals = {
+            'app': self.app,
+            'celery': self.app,
+            'Task': celery.Task,
+            'chord': celery.chord,
+            'group': celery.group,
+            'chain': celery.chain,
+            'chunks': celery.chunks,
+            'xmap': celery.xmap,
+            'xstarmap': celery.xstarmap,
+            'subtask': celery.subtask,
+            'signature': celery.signature,
+        }
 
         if not without_tasks:
             self.locals.update({
@@ -929,6 +951,7 @@ class shell(Command):  # pragma: no cover
 
 class upgrade(Command):
     """Perform upgrade between versions."""
+
     option_list = Command.option_list + (
         Option('--django', action='store_true',
                help='Upgrade Django project'),
@@ -937,6 +960,7 @@ class upgrade(Command):
         Option('--no-backup', action='store_true',
                help='Dont backup original files'),
     )
+
     choices = {'settings'}
 
     def usage(self, command):
@@ -1017,7 +1041,7 @@ class report(Command):
 
 
 class CeleryCommand(Command):
-    ext_fmt = '{self.namespace}.commands'
+
     commands = {
         'amqp': amqp,
         'beat': beat,
@@ -1038,8 +1062,8 @@ class CeleryCommand(Command):
         'status': status,
         'upgrade': upgrade,
         'worker': worker,
-
     }
+    ext_fmt = '{self.namespace}.commands'
     enable_config_from_cmdline = True
     prog_name = 'celery'
     namespace = 'celery'

+ 4 - 9
celery/bin/celeryd_detach.py

@@ -1,14 +1,9 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.bin.celeryd_detach
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Program used to daemonize the worker
-
-    Using :func:`os.execv` because forking and multiprocessing
-    leads to weird issues (it was a long time ago now, but it
-    could have something to do with the threading mutex bug)
+"""Program used to daemonize the worker.
 
+Using :func:`os.execv` as forking and multiprocessing
+leads to weird issues (it was a long time ago now, but it
+could have something to do with the threading mutex bug)
 """
 from __future__ import absolute_import, unicode_literals
 

+ 15 - 16
celery/bin/events.py

@@ -1,7 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-
-The :program:`celery events` command.
+"""The :program:`celery events` command.
 
 .. program:: celery events
 
@@ -66,7 +64,6 @@ The :program:`celery events` command.
 .. cmdoption:: --executable
 
     Executable to use for the detached process.
-
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -83,21 +80,23 @@ __all__ = ['events']
 class events(Command):
     """Event-stream utilities.
 
-    Commands::
+    Notes:
+        .. code-block:: console
 
-        celery events --app=proj
-            start graphical monitor (requires curses)
-        celery events -d --app=proj
-            dump events to screen.
-        celery events -b amqp://
-        celery events -c <camera> [options]
-            run snapshot camera.
+            # - Start graphical monitor (requires curses)
+            $ celery events --app=proj
+            # - Dump events to screen.
+            $ celery events -d --app=proj
+            $ celery events -b amqp://
+            # - Run snapshot camera.
+            $ celery events -c <camera> [options]
 
-    Examples::
+    Examples:
+        .. code-block:: console
 
-        celery events
-        celery events -d
-        celery events -c mod.attr -F 1.0 --detach --maxrate=100/m -l info
+            $ celery events
+            $ celery events -d
+            $ celery events -c mod.attr -F 1.0 --detach --maxrate=100/m -l info
     """
     doc = __doc__
     supports_args = False

+ 1 - 4
celery/bin/graph.py

@@ -1,10 +1,7 @@
 # -*- coding: utf-8 -*-
-"""
-
-The :program:`celery graph` command.
+"""The :program:`celery graph` command.
 
 .. program:: celery graph
-
 """
 from __future__ import absolute_import, unicode_literals
 

+ 1 - 4
celery/bin/logtool.py

@@ -1,10 +1,7 @@
 # -*- coding: utf-8 -*-
-"""
-
-The :program:`celery logtool` command.
+"""The :program:`celery logtool` command.
 
 .. program:: celery logtool
-
 """
 
 from __future__ import absolute_import, unicode_literals

+ 14 - 13
celery/bin/multi.py

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
+"""Start multiple worker instances from the command-line.
 
 .. program:: celery multi
 
@@ -92,7 +92,6 @@ Examples
     celery worker -n bar@myhost -c 10
     celery worker -n baz@myhost -c 10
     celery worker -n xuzzy@myhost -c 3
-
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
@@ -173,17 +172,19 @@ class MultiTool(object):
         self.verbose = verbose
         self.no_color = no_color
         self.prog_name = 'celery multi'
-        self.commands = {'start': self.start,
-                         'show': self.show,
-                         'stop': self.stop,
-                         'stopwait': self.stopwait,
-                         'stop_verify': self.stopwait,  # compat alias
-                         'restart': self.restart,
-                         'kill': self.kill,
-                         'names': self.names,
-                         'expand': self.expand,
-                         'get': self.get,
-                         'help': self.help}
+        self.commands = {
+            'start': self.start,
+            'show': self.show,
+            'stop': self.stop,
+            'stopwait': self.stopwait,
+            'stop_verify': self.stopwait,  # compat alias
+            'restart': self.restart,
+            'kill': self.kill,
+            'names': self.names,
+            'expand': self.expand,
+            'get': self.get,
+            'help': self.help,
+        }
 
     def execute_from_commandline(self, argv, cmd='celery worker'):
         argv = list(argv)   # don't modify callers argv.

+ 7 - 8
celery/bin/worker.py

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
+"""Program used to start a Celery worker instance.
 
 The :program:`celery worker` command (previously known as ``celeryd``)
 
@@ -160,7 +160,6 @@ The :program:`celery worker` command (previously known as ``celeryd``)
 .. cmdoption:: --executable
 
     Executable to use for the detached process.
-
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -184,14 +183,14 @@ __MODULE_DOC__ = __doc__
 class worker(Command):
     """Start worker instance.
 
-    Examples::
-
-        celery worker --app=proj -l info
-        celery worker -A proj -l info -Q hipri,lopri
+    Examples:
+        .. code-block:: console
 
-        celery worker -A proj --concurrency=4
-        celery worker -A proj --concurrency=1000 -P eventlet
+            $ celery worker --app=proj -l info
+            $ celery worker -A proj -l info -Q hipri,lopri
 
+            $ celery worker -A proj --concurrency=4
+            $ celery worker -A proj --concurrency=1000 -P eventlet
     """
     doc = __MODULE_DOC__  # parse help from this too
     namespace = 'worker'

+ 9 - 16
celery/bootsteps.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.bootsteps
-    ~~~~~~~~~~~~~~~~
-
-    A directed acyclic graph of reusable components.
-
-"""
+"""A directed acyclic graph of reusable components."""
 from __future__ import absolute_import, unicode_literals
 
 from collections import deque
@@ -83,13 +77,14 @@ class StepFormatter(GraphFormatter):
 class Blueprint(object):
     """Blueprint containing bootsteps that can be applied to objects.
 
-    :keyword steps: List of steps.
-    :keyword name: Set explicit name for this blueprint.
-    :keyword app: Set the Celery app for this blueprint.
-    :keyword on_start: Optional callback applied after blueprint start.
-    :keyword on_close: Optional callback applied before blueprint close.
-    :keyword on_stopped: Optional callback applied after blueprint stopped.
-
+    Arguments:
+        steps (Sequence[Union[str, Step]]): List of steps.
+        name (str): Set explicit name for this blueprint.
+        app (~@Celery): Set the Celery app for this blueprint.
+        on_start (Callable): Optional callback applied after blueprint start.
+        on_close (Callable): Optional callback applied before blueprint close.
+        on_stopped (Callable): Optional callback applied after
+            blueprint stopped.
     """
     GraphFormatter = StepFormatter
 
@@ -206,7 +201,6 @@ class Blueprint(object):
 
         For :class:`StartStopStep` the services created
         will also be added to the objects ``steps`` attribute.
-
         """
         self._debug('Preparing bootsteps.')
         order = self.order = []
@@ -305,7 +299,6 @@ class Step(object):
     is bound to a parent object, and can as such be used
     to initialize attributes in the parent object at
     parent instantiation-time.
-
     """
 
     #: Optional step name, will use ``qualname`` if not specified.

+ 225 - 36
celery/canvas.py

@@ -1,14 +1,9 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.canvas
-    ~~~~~~~~~~~~~
+"""Composing task work-flows.
 
-    Composing task work-flows.
+.. seealso::
 
-    Documentation for some of these types are in :mod:`celery`.
     You should import these from :mod:`celery` and not this module.
-
-
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -33,8 +28,10 @@ from celery.utils.functional import (
 )
 from celery.utils.text import truncate
 
-__all__ = ['Signature', 'chain', 'xmap', 'xstarmap', 'chunks',
-           'group', 'chord', 'signature', 'maybe_signature']
+__all__ = [
+    'Signature', 'chain', 'xmap', 'xstarmap', 'chunks',
+    'group', 'chord', 'signature', 'maybe_signature',
+]
 
 PY3 = sys.version_info[0] == 3
 
@@ -49,7 +46,6 @@ class _getitem_property(object):
     and optionally ``__setitem__``.
 
     Example:
-
         >>> from collections import defaultdict
 
         >>> class Me(dict):
@@ -74,7 +70,6 @@ class _getitem_property(object):
         42
         >>> me.deep
         defaultdict(<type 'dict'>, {'thing': 42})
-
     """
 
     def __init__(self, keypath, doc=None):
@@ -131,20 +126,52 @@ class Signature(dict):
     or to pass tasks around as callbacks while being compatible
     with serializers with a strict type subset.
 
-    :param task: Either a task class/instance, or the name of a task.
-    :keyword args: Positional arguments to apply.
-    :keyword kwargs: Keyword arguments to apply.
-    :keyword options: Additional options to :meth:`Task.apply_async`.
+    Signatures can also be created from tasks:
+
+    - Using the ``.signature()`` method which has the same signature
+      as ``Task.apply_async``:
+
+        .. code-block:: pycon
+
+            >>> add.signature(args=(1,), kwargs={'kw': 2}, options={})
+
+    - or the ``.s()`` shortcut that works for star arguments:
+
+        .. code-block:: pycon
+
+            >>> add.s(1, kw=2)
+
+    - the ``.s()`` shortcut does not allow you to specify execution options,
+      but there's a chaining ``.set`` method that returns the signature:
 
-    Note that if the first argument is a :class:`dict`, the other
-    arguments will be ignored and the values in the dict will be used
-    instead.
+        .. code-block:: pycon
 
-        >>> s = signature('tasks.add', args=(2, 2))
-        >>> signature(s)
-        {'task': 'tasks.add', args=(2, 2), kwargs={}, options={}}
+            >>> add.s(2, 2).set(countdown=10).set(expires=30).delay()
 
+    Note:
+        You should use :func:`~celery.signature` to create new signatures.
+        The ``Signature`` class is the type returned by that function and
+        should be used for ``isinstance`` checks for signatures.
+
+    See Also:
+        :ref:`guide-canvas` for the complete guide.
+
+    Arguments:
+        task (Task, str): Either a task class/instance, or the name of a task.
+        args (Tuple): Positional arguments to apply.
+        kwargs (Dict): Keyword arguments to apply.
+        options (Dict): Additional options to :meth:`Task.apply_async`.
+
+    Note:
+        If the first argument is a :class:`dict`, the other
+        arguments will be ignored and the values in the dict will be used
+        instead::
+
+            >>> s = signature('tasks.add', args=(2, 2))
+            >>> signature(s)
+            {'task': 'tasks.add', args=(2, 2), kwargs={}, options={}}
     """
+
     TYPES = {}
     _app = _type = None
 
@@ -188,18 +215,48 @@ class Signature(dict):
              chord_size=None)
 
     def __call__(self, *partial_args, **partial_kwargs):
+        """Call the task directly (in the current process)."""
         args, kwargs, _ = self._merge(partial_args, partial_kwargs, None)
         return self.type(*args, **kwargs)
 
     def delay(self, *partial_args, **partial_kwargs):
+        """Shortcut to :meth:`apply_async` using star arguments."""
         return self.apply_async(partial_args, partial_kwargs)
 
     def apply(self, args=(), kwargs={}, **options):
-        """Apply this task locally."""
+        """Same as :meth:`apply_async` but executed the task inline instead
+        of sending a task message."""
         # For callbacks: extra args are prepended to the stored args.
         args, kwargs, options = self._merge(args, kwargs, options)
         return self.type.apply(args, kwargs, **options)
 
+    def apply_async(self, args=(), kwargs={}, route_name=None, **options):
+        """Apply this task asynchronously.
+
+        Arguments:
+            args (Tuple): Partial args to be prepended to the existing args.
+            kwargs (Dict): Partial kwargs to be merged with existing kwargs.
+            options (Dict): Partial options to be merged
+                with existing options.
+
+        Returns:
+            ~@AsyncResult: promise of future evaluation.
+
+        See also:
+            :meth:`~@Task.apply_async` and the :ref:`guide-calling` guide.
+        """
+        try:
+            _apply = self._apply_async
+        except IndexError:  # pragma: no cover
+            # no tasks for chain, etc to find type
+            return
+        # For callbacks: extra args are prepended to the stored args.
+        if args or kwargs or options:
+            args, kwargs, options = self._merge(args, kwargs, options)
+        else:
+            args, kwargs, options = self.args, self.kwargs, self.options
+        return _apply(args, kwargs, **options)
+
     def _merge(self, args=(), kwargs={}, options={}, force=False):
         if self.immutable and not force:
             return (self.args, self.kwargs,
@@ -209,6 +266,14 @@ class Signature(dict):
                 dict(self.options, **options) if options else self.options)
 
     def clone(self, args=(), kwargs={}, **opts):
+        """Create a copy of this signature.
+
+        Arguments:
+            args (Tuple): Partial args to be prepended to the existing args.
+            kwargs (Dict): Partial kwargs to be merged with existing kwargs.
+            options (Dict): Partial options to be merged with
+                existing options.
+        """
         # need to deepcopy options so origins links etc. is not modified.
         if args or kwargs or opts:
             args, kwargs, opts = self._merge(args, kwargs, opts)
@@ -225,6 +290,15 @@ class Signature(dict):
 
     def freeze(self, _id=None, group_id=None, chord=None,
                root_id=None, parent_id=None):
+        """Finalize the signature by adding a concrete task id.
+
+        The task will not be called, and you should not call the signature
+        twice after freezing it, as that would result in two task messages
+        using the same task id.
+
+        Returns:
+            ~@AsyncResult: promise of future evaluation.
+        """
         opts = self.options
         try:
             tid = opts['task_id']
@@ -244,6 +318,9 @@ class Signature(dict):
     _freeze = freeze
 
     def replace(self, args=None, kwargs=None, options=None):
+        """Replace the args, kwargs or options set for this signature.
+        These are only replaced if the argument for the section is
+        not :const:`None`."""
         s = self.clone()
         if args is not None:
             s.args = args
@@ -254,6 +331,12 @@ class Signature(dict):
         return s
 
     def set(self, immutable=None, **options):
+        """Set arbitrary execution options (same as ``.options.update(…)``).
+
+        Returns:
+            Signature: This is a chaining method call
+                (i.e. it will return ``self``).
+        """
         if immutable is not None:
             self.set_immutable(immutable)
         self.options.update(options)
@@ -265,19 +348,6 @@ class Signature(dict):
     def set_parent_id(self, parent_id):
         self.parent_id = parent_id
 
-    def apply_async(self, args=(), kwargs={}, route_name=None, **options):
-        try:
-            _apply = self._apply_async
-        except IndexError:  # pragma: no cover
-            # no tasks for chain, etc to find type
-            return
-        # For callbacks: extra args are prepended to the stored args.
-        if args or kwargs or options:
-            args, kwargs, options = self._merge(args, kwargs, options)
-        else:
-            args, kwargs, options = self.args, self.kwargs, self.options
-        return _apply(args, kwargs, **options)
-
     def _with_list_option(self, key):
         items = self.options.setdefault(key, [])
         if not isinstance(items, MutableSequence):
@@ -295,9 +365,23 @@ class Signature(dict):
         items.extend(maybe_list(value))
 
     def link(self, callback):
+        """Add a callback task to be applied if this task
+        executes successfully.
+
+        Returns:
+            Signature: the argument passed, for chaining
+                or use with :func:`~functools.reduce`.
+        """
         return self.append_to_list_option('link', callback)
 
     def link_error(self, errback):
+        """Add a callback task to be applied if an error occurs
+        while executing this task.
+
+        Returns:
+            Signature: the argument passed, for chaining
+                or use with :func:`~functools.reduce`.
+        """
         return self.append_to_list_option('link_error', errback)
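Since :meth:`link` returns the callback that was passed in, callbacks compose with :func:`~functools.reduce`; a sketch assuming ``add`` tasks:

.. code-block:: pycon

    >>> from functools import reduce
    >>> sigs = [add.s(2, 2), add.s(4), add.s(8)]
    >>> reduce(lambda a, b: a.link(b), sigs)  # link each to the previous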
 
     def on_error(self, errback):
@@ -309,12 +393,13 @@ class Signature(dict):
 
         calls the ``add`` task, not the ``errback`` task, but the
         reverse is true for :meth:`link_error`.
-
         """
         self.link_error(errback)
         return self
 
     def flatten_links(self):
+        """Return a recursive list of dependencies (unchain if you will,
+        but with links intact)."""
         return list(_chain.from_iterable(_chain(
             [[self]],
             (link.flatten_links()
@@ -420,6 +505,50 @@ class Signature(dict):
 @Signature.register_type
 @python_2_unicode_compatible
 class chain(Signature):
+    """Chains tasks together, so that each tasks follows each other
+    by being applied as a callback of the previous task.
+
+    Note:
+        If called with only one argument, then that argument must
+        be an iterable of tasks to chain, which means you can
+        use this with a generator expression.
+
+    Example:
+        This is effectively :math:`((2 + 2) + 4)`:
+
+        .. code-block:: pycon
+
+            >>> res = chain(add.s(2, 2), add.s(4))()
+            >>> res.get()
+            8
+
+        Calling a chain will return the result of the last task in the chain.
+        You can get to the other tasks by following the ``result.parent`` links:
+
+        .. code-block:: pycon
+
+            >>> res.parent.get()
+            4
+
+        Using a generator expression:
+
+        .. code-block:: pycon
+
+            >>> lazy_chain = chain(add.s(i) for i in range(10))
+            >>> res = lazy_chain(3)
+
+    Arguments:
+        *tasks (Signature): List of task signatures to chain.
+            If only one argument is passed and that argument is
+            an iterable, then that will be used as the list of signatures
+            to chain instead.  This means that you can use a generator
+            expression.
+
+    Returns:
+        ~celery.chain: A lazy signature that can be called to apply the first
+            task in the chain.  When that task succeeds, the next task in the
+            chain is applied, and so on.
+    """
     tasks = _getitem_property('kwargs.tasks', 'Tasks in chain.')
 
     def __init__(self, *tasks, **options):
@@ -715,6 +844,34 @@ def _maybe_group(tasks, app):
 @Signature.register_type
 @python_2_unicode_compatible
 class group(Signature):
+    """Creates a group of tasks to be executed in parallel.
+
+    A group is lazy so you must call it to take action and evaluate
+    the group.
+
+    Note:
+        If only one argument is passed, and that argument is an iterable,
+        then that will be used as the list of tasks instead, which
+        means you can use ``group`` with generator expressions.
+
+    Example:
+        >>> lazy_group = group([add.s(2, 2), add.s(4, 4)])
+        >>> promise = lazy_group()  # <-- evaluate: returns lazy result.
+        >>> promise.get()  # <-- will wait for the task to return
+        [4, 8]
+
+    Arguments:
+        *tasks (Signature): A list of signatures that this group will call.
+            If there is only one argument, and that argument is an iterable,
+            then that will define the list of signatures instead.
+        **options (Any): Execution options applied to all tasks
+            in the group.
+
+    Returns:
+        ~celery.group: signature that when called will then call all of the
+            tasks in the group (and return a :class:`GroupResult` instance
+            that can be used to inspect the state of the group).
+    """
     tasks = _getitem_property('kwargs.tasks', 'Tasks in group.')
 
     def __init__(self, *tasks, **options):
@@ -913,7 +1070,31 @@ class group(Signature):
 @Signature.register_type
 @python_2_unicode_compatible
 class chord(Signature):
+    """Barrier synchronization primitive.
+
+    A chord consists of a header and a body.
+
+    The header is a group of tasks that must complete before the callback is
+    called.  A chord is essentially a callback for a group of tasks.
+
+    The body is applied with the return values of all the header
+    tasks as a list.
 
+    Example:
+
+        The chord:
+
+        .. code-block:: pycon
+
+            >>> res = chord([add.s(2, 2), add.s(4, 4)])(sum_task.s())
+
+        is effectively :math:`\Sigma ((2 + 2) + (4 + 4))`:
+
+        .. code-block:: pycon
+
+            >>> res.get()
+            12
+    """
     def __init__(self, header, body=None, task='celery.chord',
                  args=(), kwargs={}, app=None, **options):
         Signature.__init__(
@@ -1064,6 +1245,14 @@ class chord(Signature):
 
 
 def signature(varies, *args, **kwargs):
+    """Create new signature
+
+    - if the first argument is a signature already then it's cloned.
+    - if the first argument is a dict, then a Signature version is returned.
+
+    Returns:
+        Signature: The resulting signature.
+    """
     app = kwargs.get('app')
     if isinstance(varies, dict):
         if isinstance(varies, abstract.CallableSignature):
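Sketch of the two branches described in the docstring above (assuming a registered ``tasks.add`` task):

.. code-block:: pycon

    >>> s1 = signature('tasks.add', args=(2, 2))  # from a task name
    >>> s2 = signature(dict(s1))                  # from a dict
    >>> s3 = signature(s1)                        # from a signature: cloned
    >>> s3 is s1
    False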

+ 1 - 7
celery/concurrency/__init__.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.concurrency
-    ~~~~~~~~~~~~~~~~~~
-
-    Pool implementation abstract factory, and alias definitions.
-
-"""
+"""Pool implementation abstract factory, and alias definitions."""
 from __future__ import absolute_import, unicode_literals
 
 # Import from kombu directly as it's used

+ 14 - 17
celery/concurrency/asynpool.py

@@ -1,20 +1,17 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.concurrency.asynpool
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    .. note::
+"""Version of multiprocessing.Pool using Async I/O.
 
-        This module will be moved soon, so don't use it directly.
+.. note::
 
-    Non-blocking version of :class:`multiprocessing.Pool`.
+    This module will be moved soon, so don't use it directly.
 
-    This code deals with three major challenges:
+This is a non-blocking version of :class:`multiprocessing.Pool`.
 
-        1) Starting up child processes and keeping them running.
-        2) Sending jobs to the processes and receiving results back.
-        3) Safely shutting down this system.
+This code deals with three major challenges:
 
+#. Starting up child processes and keeping them running.
+#. Sending jobs to the processes and receiving results back.
+#. Safely shutting down this system.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -153,21 +150,22 @@ def _select(readers=None, writers=None, err=None, timeout=0,
     """Simple wrapper to :class:`~select.select`, using :`~select.poll`
     as the implementation.
 
-    :param readers: Set of reader fds to test if readable.
-    :param writers: Set of writer fds to test if writable.
-    :param err: Set of fds to test for error condition.
+    Arguments:
+        readers (Set[Fd]): Set of reader fds to test if readable.
+        writers (Set[Fd]): Set of writer fds to test if writable.
+        err (Set[Fd]): Set of fds to test for error condition.
 
     All fd sets passed must be mutable as this function
     will remove non-working fds from them, this also means
     the caller must make sure there are still fds in the sets
     before calling us again.
 
-    :returns: tuple of ``(readable, writable, again)``, where
+    Returns:
+        Tuple[Set, Set, Set]: of ``(readable, writable, again)``, where
         ``readable`` is a set of fds that have data available for read,
         ``writable`` is a set of fds that is ready to be written to
         and ``again`` is a flag that if set means the caller must
         throw away the result and call us again.
-
     """
     readers = set() if readers is None else readers
     writers = set() if writers is None else writers
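Illustrative only, since this helper is internal: the ``again`` flag means the result must be discarded and ``_select`` called again (``fd`` here is assumed to be an open file descriptor):

.. code-block:: python

    readable, writable, again = _select(readers={fd}, timeout=0.1)
    if not again and fd in readable:
        data = os.read(fd, 4096)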
@@ -1158,7 +1156,6 @@ class AsynPool(_pool.Pool):
 
         In Celery this is called whenever the transport connection is lost
         (consumer restart), and when a process is terminated.
-
         """
         resq = proc.outq._reader
         on_state_change = self._result_handler.on_state_change

+ 1 - 8
celery/concurrency/base.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.concurrency.base
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    TaskPool interface.
-
-"""
+"""Base Execution Pool"""
 from __future__ import absolute_import, unicode_literals
 
 import logging
@@ -146,7 +140,6 @@ class BasePool(object):
 
         Callbacks should optimally return as soon as possible since
         otherwise the thread which handles the result will get blocked.
-
         """
         if self._does_debug:
             logger.debug('TaskPool: Apply %s (args:%s kwargs:%s)',

+ 1 - 7
celery/concurrency/eventlet.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.concurrency.eventlet
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Eventlet pool implementation.
-
-"""
+"""Eventlet execution pool."""
 from __future__ import absolute_import, unicode_literals
 
 import sys

+ 1 - 7
celery/concurrency/gevent.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.concurrency.gevent
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    gevent pool implementation.
-
-"""
+"""Gevent execution pool."""
 from __future__ import absolute_import, unicode_literals
 
 from time import time

+ 4 - 10
celery/concurrency/prefork.py

@@ -1,10 +1,7 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.concurrency.prefork
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Pool implementation using :mod:`multiprocessing`.
+"""Prefork execution pool.
 
+Pool implementation using :mod:`multiprocessing`.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -45,10 +42,8 @@ warning, debug = logger.warning, logger.debug
 def process_initializer(app, hostname):
     """Pool child process initializer.
 
-    This will initialize a child pool process to ensure the correct
-    app instance is used and things like
-    logging works.
-
+    Initialize the child pool process to ensure the correct
+    app instance is used and things like logging works.
     """
     _set_task_join_will_block(True)
     platforms.signals.reset(*WORKER_SIGRESET)
@@ -90,7 +85,6 @@ def process_destructor(pid, exitcode):
     """Pool child process destructor
 
     Dispatch the :signal:`worker_process_shutdown` signal.
-
     """
     signals.worker_process_shutdown.send(
         sender=None, pid=pid, exitcode=exitcode,

+ 1 - 7
celery/concurrency/solo.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.concurrency.solo
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Single-threaded pool implementation.
-
-"""
+"""Single-threaded execution pool."""
 from __future__ import absolute_import, unicode_literals
 
 import os

+ 5 - 10
celery/contrib/abortable.py

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-"""
-=========================
+"""Abortable Tasks.
+
 Abortable tasks overview
 =========================
 
@@ -82,7 +82,6 @@ have it block until the task is finished.
    producer and the consumer.  This is currently implemented through the
    database backend.  Therefore, this class will only work with the
    database backends.
-
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -113,7 +112,6 @@ class AbortableAsyncResult(AsyncResult):
 
     Specifically, this gives the `AsyncResult` a :meth:`abort()` method,
     which sets the state of the underlying Task to `'ABORTED'`.
-
     """
 
     def is_aborted(self):
@@ -126,10 +124,9 @@ class AbortableAsyncResult(AsyncResult):
         Abortable tasks monitor their state at regular intervals and
         terminate execution if so.
 
-        Be aware that invoking this method does not guarantee when the
-        task will be aborted (or even if the task will be aborted at
-        all).
-
+        Warning:
+            Be aware that invoking this method does not guarantee when the
+            task will be aborted (or even if the task will be aborted at all).
         """
         # TODO: store_result requires all four arguments to be set,
         # but only state should be updated here
@@ -144,7 +141,6 @@ class AbortableTask(Task):
     All subclasses of :class:`AbortableTask` must call the
     :meth:`is_aborted` method periodically and act accordingly when
     the call evaluates to :const:`True`.
-
     """
     abstract = True
 
@@ -163,7 +159,6 @@ class AbortableTask(Task):
         backend (for example a database query), so find a good balance
         between calling it regularly (for responsiveness), but not too
         often (for performance).
-
         """
         task_id = kwargs.get('task_id', self.request.id)
         result = self.AsyncResult(task_id)

+ 32 - 42
celery/contrib/migrate.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.contrib.migrate
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Migration tools.
-
-"""
+"""Message migration tools (Broker <-> Broker)."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import socket
@@ -130,29 +124,30 @@ def move(predicate, connection=None, exchange=None, routing_key=None,
          **kwargs):
     """Find tasks by filtering them and move the tasks to a new queue.
 
-    :param predicate: Filter function used to decide which messages
-        to move.  Must accept the standard signature of ``(body, message)``
-        used by Kombu consumer callbacks. If the predicate wants the message
-        to be moved it must return either:
-
-            1) a tuple of ``(exchange, routing_key)``, or
-
-            2) a :class:`~kombu.entity.Queue` instance, or
-
-            3) any other true value which means the specified
-               ``exchange`` and ``routing_key`` arguments will be used.
-
-    :keyword connection: Custom connection to use.
-    :keyword source: Optional list of source queues to use instead of the
-        default (which is the queues in :setting:`task_queues`).
-        This list can also contain new :class:`~kombu.entity.Queue` instances.
-    :keyword exchange: Default destination exchange.
-    :keyword routing_key: Default destination routing key.
-    :keyword limit: Limit number of messages to filter.
-    :keyword callback: Callback called after message moved,
-        with signature ``(state, body, message)``.
-    :keyword transform: Optional function to transform the return
-        value (destination) of the filter function.
+    Arguments:
+        predicate (Callable): Filter function used to decide which messages
+            to move.  Must accept the standard signature of ``(body, message)``
+            used by Kombu consumer callbacks. If the predicate wants the
+            message to be moved it must return either:
+
+                1) a tuple of ``(exchange, routing_key)``, or
+
+                2) a :class:`~kombu.entity.Queue` instance, or
+
+                3) any other true value which means the specified
+                    ``exchange`` and ``routing_key`` arguments will be used.
+        connection (kombu.Connection): Custom connection to use.
+        source (List[Union[str, kombu.Queue]]): Optional list of source
+            queues to use instead of the default (which is the queues
+            in :setting:`task_queues`).  This list can also contain
+            :class:`~kombu.entity.Queue` instances.
+        exchange (str, kombu.Exchange): Default destination exchange.
+        routing_key (str): Default destination routing key.
+        limit (int): Limit number of messages to filter.
+        callback (Callable): Callback called after message moved,
+            with signature ``(state, body, message)``.
+        transform (Callable): Optional function to transform the return
+            value (destination) of the filter function.
 
     Also supports the same keyword arguments as :func:`start_filter`.
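A hedged sketch of the predicate contract above (hypothetical filter that moves one task type to a new queue):

.. code-block:: python

    from kombu import Queue
    from celery.contrib.migrate import move

    def is_add_task(body, message):
        # Returning a Queue tells move() where to put this message.
        if body.get('task') == 'tasks.add':
            return Queue('new_queue')

    move(is_add_task, limit=100)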
 
@@ -184,7 +179,6 @@ def move(predicate, connection=None, exchange=None, routing_key=None,
     or a :class:`~kombu.entitiy.Queue` instance.
     Any other true value means that the task will be moved to the
     default exchange/routing_key.
-
     """
     app = app_or_default(app)
     queues = [_maybe_queue(app, queue) for queue in source or []] or None
@@ -309,11 +303,11 @@ def start_filter(app, conn, filter, limit=None, timeout=1.0,
 def move_task_by_id(task_id, dest, **kwargs):
     """Find a task by id and move it to another queue.
 
-    :param task_id: Id of task to move.
-    :param dest: Destination queue.
-
-    Also supports the same keyword arguments as :func:`move`.
-
+    Arguments:
+        task_id (str): Id of task to find and move.
+        dest (str, kombu.Queue): Destination queue.
+        **kwargs (Any): Also supports the same keyword
+            arguments as :func:`move`.
     """
     return move_by_idmap({task_id: dest}, **kwargs)
 
@@ -322,14 +316,12 @@ def move_by_idmap(map, **kwargs):
     """Moves tasks by matching from a ``task_id: queue`` mapping,
     where ``queue`` is a queue to move the task to.
 
-    Example::
-
+    Example:
         >>> move_by_idmap({
         ...     '5bee6e82-f4ac-468e-bd3d-13e8600250bc': Queue('name'),
         ...     'ada8652d-aef3-466b-abd2-becdaf1b82b3': Queue('name'),
         ...     '3a2b140d-7db1-41ba-ac90-c36a0ef4ab1f': Queue('name')},
         ...   queues=['hipri'])
-
     """
     def task_id_in_map(body, message):
         return map.get(body['id'])
@@ -343,13 +335,11 @@ def move_by_taskmap(map, **kwargs):
     """Moves tasks by matching from a ``task_name: queue`` mapping,
     where ``queue`` is the queue to move the task to.
 
-    Example::
-
+    Example:
         >>> move_by_taskmap({
         ...     'tasks.add': Queue('name'),
         ...     'tasks.mul': Queue('name'),
         ... })
-
     """
 
     def task_name_in_map(body, message):

+ 16 - 13
celery/contrib/rdb.py

@@ -1,12 +1,14 @@
 # -*- coding: utf-8 -*-
-"""
-``celery.contrib.rdb``
-======================
+"""Remote Debugger.
+
+Introduction
+============
 
-Remote debugger for Celery tasks running in multiprocessing pool workers.
-Inspired by http://snippets.dzone.com/posts/show/7248
+This is a remote debugger for Celery tasks running in multiprocessing
+pool workers.  Inspired by http://snippets.dzone.com/posts/show/7248
 
-**Usage**
+Usage
+-----
 
 .. code-block:: python
 
@@ -19,8 +21,8 @@ Inspired by http://snippets.dzone.com/posts/show/7248
         rdb.set_trace()
         return result
 
-
-**Environment Variables**
+Environment Variables
+=====================
 
 .. envvar:: CELERY_RDB_HOST
 
@@ -38,7 +40,6 @@ Inspired by http://snippets.dzone.com/posts/show/7248
     Base port to bind to.  Default is 6899.
     The debugger will try to find an available port starting from the
     base port.  The selected port will be logged by the worker.
-
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
@@ -53,13 +54,15 @@ from billiard.process import current_process
 
 from celery.five import range
 
-__all__ = ['CELERY_RDB_HOST', 'CELERY_RDB_PORT', 'default_port',
-           'Rdb', 'debugger', 'set_trace']
+__all__ = [
+    'CELERY_RDB_HOST', 'CELERY_RDB_PORT', 'DEFAULT_PORT',
+    'Rdb', 'debugger', 'set_trace',
+]
 
-default_port = 6899
+DEFAULT_PORT = 6899
 
 CELERY_RDB_HOST = os.environ.get('CELERY_RDB_HOST') or '127.0.0.1'
-CELERY_RDB_PORT = int(os.environ.get('CELERY_RDB_PORT') or default_port)
+CELERY_RDB_PORT = int(os.environ.get('CELERY_RDB_PORT') or DEFAULT_PORT)
 
 #: Holds the currently active debugger.
 _current = [None]

+ 5 - 7
celery/contrib/sphinx.py

@@ -1,11 +1,11 @@
 # -*- coding: utf-8 -*-
-"""
-``celery.contrib.sphinx``
-=========================
+"""Sphinx documentation plugin used to document tasks.
 
-Sphinx documentation plugin
+Introduction
+============
 
-**Usage**
+Usage
+-----
 
 Add the extension to your :file:`docs/conf.py` configuration module:
 
@@ -21,14 +21,12 @@ then you can change the ``celery_task_prefix`` configuration value:
 
     celery_task_prefix = '(task)'  # < default
 
-
 With the extension installed `autodoc` will automatically find
 task decorated objects and generate the correct (as well as
 add a ``(task)`` prefix), and you can also refer to the tasks
 using `:task:proj.tasks.add` syntax.
 
 Use ``.. autotask::`` to manually document a task.
-
 """
 from __future__ import absolute_import, unicode_literals
 

+ 54 - 58
celery/events/__init__.py

@@ -1,12 +1,9 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.events
-    ~~~~~~~~~~~~~
-
-    Events is a stream of messages sent for certain actions occurring
-    in the worker (and clients if :setting:`task_send_sent_event`
-    is enabled), used for monitoring purposes.
+"""Monitoring Event Receiver+Dispatcher.
 
+Events is a stream of messages sent for certain actions occurring
+in the worker (and clients if :setting:`task_send_sent_event`
+is enabled), used for monitoring purposes.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -52,7 +49,6 @@ def Event(type, _fields=None, __dict__=dict, __now__=time.time, **fields):
 
     An event is a dictionary, the only required field is ``type``.
     A ``timestamp`` field will be set to the current time if not provided.
-
     """
     event = __dict__(_fields, **fields) if _fields else fields
     if 'timestamp' not in event:
@@ -72,7 +68,6 @@ def group_from(type):
 
         >>> group_from('custom-my-event')
         'custom'
-
     """
     return type.split('-', 1)[0]
 
@@ -80,29 +75,30 @@ def group_from(type):
 class EventDispatcher(object):
     """Dispatches event messages.
 
-    :param connection: Connection to the broker.
-
-    :keyword hostname: Hostname to identify ourselves as,
-        by default uses the hostname returned by
-        :func:`~celery.utils.anon_nodename`.
+    Arguments:
+        connection (kombu.Connection): Connection to the broker.
 
-    :keyword groups: List of groups to send events for.  :meth:`send` will
-        ignore send requests to groups not in this list.
-        If this is :const:`None`, all events will be sent. Example groups
-        include ``"task"`` and ``"worker"``.
+        hostname (str): Hostname to identify ourselves as.
+            By default uses the hostname returned by
+            :func:`~celery.utils.anon_nodename`.
 
-    :keyword enabled: Set to :const:`False` to not actually publish any events,
-        making :meth:`send` a no-op.
+        groups (Sequence[str]): List of groups to send events for.
+            :meth:`send` will ignore send requests to groups not in this list.
+            If this is :const:`None`, all events will be sent. Example groups
+            include ``"task"`` and ``"worker"``.
 
-    :keyword channel: Can be used instead of `connection` to specify
-        an exact channel to use when sending events.
+        enabled (bool): Set to :const:`False` to not actually publish any
+            events, making :meth:`send` a no-op.
 
-    :keyword buffer_while_offline: If enabled events will be buffered
-       while the connection is down. :meth:`flush` must be called
-       as soon as the connection is re-established.
+        channel (kombu.Channel): Can be used instead of `connection` to specify
+            an exact channel to use when sending events.
 
-    You need to :meth:`close` this after use.
+        buffer_while_offline (bool): If enabled events will be buffered
+            while the connection is down. :meth:`flush` must be called
+            as soon as the connection is re-established.
 
+    Note:
+        You need to :meth:`close` this after use.
     """
     DISABLED_TRANSPORTS = {'sql'}
 
@@ -175,19 +171,20 @@ class EventDispatcher(object):
         """Publish event using a custom :class:`~kombu.Producer`
         instance.
 
-        :param type: Event type name, with group separated by dash (`-`).
-        :param fields: Dictionary of event fields, must be json serializable.
-        :param producer: :class:`~kombu.Producer` instance to use,
-            only the ``publish`` method will be called.
-        :keyword retry: Retry in the event of connection failure.
-        :keyword retry_policy: Dict of custom retry policy, see
-            :meth:`~kombu.Connection.ensure`.
-        :keyword blind: Don't set logical clock value (also do not forward
-            the internal logical clock).
-        :keyword Event: Event type used to create event,
-            defaults to :func:`Event`.
-        :keyword utcoffset: Function returning the current utcoffset in hours.
-
+        Arguments:
+            type (str): Event type name, with group separated by dash (`-`).
+            fields (Dict): Dictionary of event fields; must be
+                json serializable.
+            producer (kombu.Producer): Producer instance to use; only
+                the ``publish`` method will be called.
+            retry (bool): Retry in the event of connection failure.
+            retry_policy (Mapping): Map of custom retry policy options.
+                See :meth:`~kombu.Connection.ensure`.
+            blind (bool): Don't set logical clock value (also do not forward
+                the internal logical clock).
+            Event (Callable): Event type used to create event.
+                Defaults to :func:`Event`.
+            utcoffset (Callable): Function returning the current
+                utc offset in hours.
         """
         clock = None if blind else self.clock.forward()
         event = Event(type, hostname=self.hostname, utcoffset=utcoffset(),
@@ -220,17 +217,18 @@ class EventDispatcher(object):
              retry_policy=None, Event=Event, **fields):
         """Send event.
 
-        :param type: Event type name, with group separated by dash (`-`).
-        :keyword retry: Retry in the event of connection failure.
-        :keyword retry_policy: Dict of custom retry policy, see
-            :meth:`~kombu.Connection.ensure`.
-        :keyword blind: Don't set logical clock value (also do not forward
-            the internal logical clock).
-        :keyword Event: Event type used to create event,
-            defaults to :func:`Event`.
-        :keyword utcoffset: Function returning the current utcoffset in hours.
-        :keyword \*\*fields: Event fields, must be json serializable.
-
+        Arguments:
+            type (str): Event type name, with group separated by dash (`-`).
+            retry (bool): Retry in the event of connection failure.
+            retry_policy (Mapping): Map of custom retry policy options.
+                See :meth:`~kombu.Connection.ensure`.
+            blind (bool): Don't set logical clock value (also do not forward
+                the internal logical clock).
+            Event (Callable): Event type used to create event,
+                defaults to :func:`Event`.
+            utcoffset (Callable): Function returning the current
+                utc offset in hours.
+            **fields (Any): Event fields -- must be json serializable.
         """
         if self.enabled:
             groups, group = self.groups, group_from(type)
@@ -288,13 +286,12 @@ class EventDispatcher(object):
 class EventReceiver(ConsumerMixin):
     """Capture events.
 
-    :param connection: Connection to the broker.
-    :keyword handlers: Event handlers.
-
-    :attr:`handlers` is a dict of event types and their handlers,
-    the special handler `"*"` captures all events that doesn't have a
-    handler.
-
+    Arguments:
+        connection (kombu.Connection): Connection to the broker.
+        handlers (Mapping[Callable]): Event handlers.
+            This is a map of event type names and their handlers.
+            The special handler `"*"` captures all events that don't have a
+            handler.
     """
     app = None
 
@@ -361,7 +358,6 @@ class EventReceiver(ConsumerMixin):
         This has to run in the main process, and it will never stop
         unless :attr:`EventDispatcher.should_stop` is set to True, or
         forced via :exc:`KeyboardInterrupt` or :exc:`SystemExit`.
-
         """
         return list(self.consume(limit=limit, timeout=timeout, wakeup=wakeup))
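
To make the converted ``Arguments`` sections above concrete, here is a hedged
sketch of consuming events with ``EventReceiver`` via ``app.events.Receiver``,
including the special ``'*'`` handler described in its docstring (the broker
URL and handler bodies are assumptions):

.. code-block:: python

    from celery import Celery

    app = Celery('proj', broker='amqp://')

    def on_task_failed(event):
        print('task-failed: %r' % (event,))

    def catchall(event):
        # '*' receives every event that has no dedicated handler.
        pass

    with app.connection() as connection:
        recv = app.events.Receiver(connection, handlers={
            'task-failed': on_task_failed,
            '*': catchall,
        })
        # Runs in the main process; stops on KeyboardInterrupt/SystemExit.
        recv.capture(limit=None, timeout=None, wakeup=True)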
 

+ 10 - 14
celery/events/cursesmon.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.events.cursesmon``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Graphical monitor of Celery events using curses.
-
-"""
+"""Graphical monitor of Celery events using curses."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import curses
@@ -62,13 +56,15 @@ class CursesMonitor(object):  # pragma: no cover
         self.app = app
         self.keymap = keymap or self.keymap
         self.state = state
-        default_keymap = {'J': self.move_selection_down,
-                          'K': self.move_selection_up,
-                          'C': self.revoke_selection,
-                          'T': self.selection_traceback,
-                          'R': self.selection_result,
-                          'I': self.selection_info,
-                          'L': self.selection_rate_limit}
+        default_keymap = {
+            'J': self.move_selection_down,
+            'K': self.move_selection_up,
+            'C': self.revoke_selection,
+            'T': self.selection_traceback,
+            'R': self.selection_result,
+            'I': self.selection_info,
+            'L': self.selection_rate_limit,
+        }
         self.keymap = dict(default_keymap, **self.keymap)
         self.lock = threading.RLock()
 

+ 8 - 9
celery/events/dumper.py

@@ -1,11 +1,8 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.events.dumper
-    ~~~~~~~~~~~~~~~~~~~~
-
-    This is a simple program that dumps events to the console
-    as they happen. Think of it like a `tcpdump` for Celery events.
+"""Utility to dump events to screen.
 
+This is a simple program that dumps events to the console
+as they happen. Think of it like a `tcpdump` for Celery events.
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
@@ -21,9 +18,11 @@ __all__ = ['Dumper', 'evdump']
 
 TASK_NAMES = LRUCache(limit=0xFFF)
 
-HUMAN_TYPES = {'worker-offline': 'shutdown',
-               'worker-online': 'started',
-               'worker-heartbeat': 'heartbeat'}
+HUMAN_TYPES = {
+    'worker-offline': 'shutdown',
+    'worker-online': 'started',
+    'worker-heartbeat': 'heartbeat',
+}
 
 CONNECTION_ERROR = """\
 -> Cannot connect to %s: %s.

+ 7 - 9
celery/events/snapshot.py

@@ -1,14 +1,11 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.events.snapshot
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Consuming the events as a stream is not always suitable
-    so this module implements a system to take snapshots of the
-    state of a cluster at regular intervals.  There is a full
-    implementation of this writing the snapshots to a database
-    in :mod:`djcelery.snapshots` in the `django-celery` distribution.
+"""Periodically store events in a database.
 
+Consuming the events as a stream is not always suitable
+so this module implements a system to take snapshots of the
+state of a cluster at regular intervals.  There is a full
+implementation of this writing the snapshots to a database
+in :mod:`djcelery.snapshots` in the `django-celery` distribution.
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
@@ -28,6 +25,7 @@ logger = get_logger('celery.evcam')
 
 
 class Polaroid(object):
+
     timer = None
     shutter_signal = Signal(providing_args=('state',))
     cleanup_signal = Signal()

+ 15 - 20
celery/events/state.py

@@ -1,20 +1,17 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.events.state
-    ~~~~~~~~~~~~~~~~~~~
-
-    This module implements a data-structure used to keep
-    track of the state of a cluster of workers and the tasks
-    it is working on (by consuming events).
+"""In-memory representation of cluster state.
 
-    For every event consumed the state is updated,
-    so the state represents the state of the cluster
-    at the time of the last event.
+This module implements a data-structure used to keep
+track of the state of a cluster of workers and the tasks
+it is working on (by consuming events).
 
-    Snapshots (:mod:`celery.events.snapshot`) can be used to
-    take "pictures" of this state at regular intervals
-    to e.g. store that in a database.
+For every event consumed the state is updated,
+so the state represents the state of the cluster
+at the time of the last event.
 
+Snapshots (:mod:`celery.events.snapshot`) can be used to
+take "pictures" of this state at regular intervals
+to e.g. store that in a database.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -90,7 +87,6 @@ class CallableDefaultdict(defaultdict):
 
         >>> add_tasks = list(state.tasks_by_type(
         ...     'proj.tasks.add', reverse=True))
-
     """
 
     def __init__(self, fun, *args, **kwargs):
@@ -461,7 +457,8 @@ class State(object):
     def get_or_create_worker(self, hostname, **kwargs):
         """Get or create worker by hostname.
 
-        Return tuple of ``(worker, was_created)``.
+        Returns:
+            Tuple: of ``(worker, was_created)``.
         """
         try:
             worker = self.workers[hostname]
@@ -656,8 +653,8 @@ class State(object):
         This is slower than accessing :attr:`tasks_by_type`,
         but will be ordered by time.
 
-        Return a generator giving ``(uuid, Task)`` tuples.
-
+        Returns:
+            Generator: giving ``(uuid, Task)`` pairs.
         """
         return islice(
             ((uuid, task) for uuid, task in self.tasks_by_time(reverse=reverse)
@@ -668,9 +665,7 @@ class State(object):
     def _tasks_by_worker(self, hostname, limit=None, reverse=True):
         """Get all tasks by worker.
 
-        This is slower than accessing :attr:`tasks_by_worker`,
-        but will be ordered by time.
-
+        Slower than accessing :attr:`tasks_by_worker`, but ordered by time.
         """
         return islice(
             ((uuid, task) for uuid, task in self.tasks_by_time(reverse=reverse)

+ 2 - 8
celery/exceptions.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.exceptions
-    ~~~~~~~~~~~~~~~~~
-
-    This module contains all exceptions used by the Celery API.
-
-"""
+"""This module contains all exceptions used by the Celery API."""
 from __future__ import absolute_import, unicode_literals
 
 import numbers
@@ -46,8 +40,8 @@ class SecurityError(CeleryError):
     """Security related exceptions.
 
     Handle with care.
-
     """
+    pass
 
 
 class TaskPredicate(CeleryError):

+ 1 - 9
celery/five.py

@@ -1,13 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.five
-    ~~~~~~~~~~~
-
-    Compatibility implementations of features
-    only available in newer Python versions.
-
-
-"""
+"""Python 2/3 compatibility utilities."""
 from __future__ import absolute_import, unicode_literals
 
 import operator

+ 1 - 0
celery/fixups/django.py

@@ -1,3 +1,4 @@
+"""Django-specific customization."""
 from __future__ import absolute_import, unicode_literals
 
 import os

+ 8 - 9
celery/loaders/__init__.py

@@ -1,11 +1,8 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.loaders
-    ~~~~~~~~~~~~~~
-
-    Loaders define how configuration is read, what happens
-    when workers start, when tasks are executed and so on.
+"""Get loader by name.
 
+Loaders define how configuration is read, what happens
+when workers start, when tasks are executed and so on.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -13,9 +10,11 @@ from celery.utils.imports import symbol_by_name, import_from_cwd
 
 __all__ = ['get_loader_cls']
 
-LOADER_ALIASES = {'app': 'celery.loaders.app:AppLoader',
-                  'default': 'celery.loaders.default:Loader',
-                  'django': 'djcelery.loaders:DjangoLoader'}
+LOADER_ALIASES = {
+    'app': 'celery.loaders.app:AppLoader',
+    'default': 'celery.loaders.default:Loader',
+    'django': 'djcelery.loaders:DjangoLoader',
+}
 
 
 def get_loader_cls(loader):
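
A quick sketch of how the alias table above is typically consumed (the
fully qualified form is an assumption based on the ``symbol_by_name``
import):

.. code-block:: python

    from celery.loaders import get_loader_cls

    # Alias lookup: resolves to celery.loaders.default:Loader.
    Loader = get_loader_cls('default')

    # A fully qualified 'module:Class' name should also resolve,
    # since unknown names fall through to symbol_by_name.
    SameLoader = get_loader_cls('celery.loaders.default:Loader')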

+ 1 - 7
celery/loaders/app.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.loaders.app
-    ~~~~~~~~~~~~~~~~~~
-
-    The default loader used with custom app instances.
-
-"""
+"""The default loader used with custom app instances."""
 from __future__ import absolute_import, unicode_literals
 
 from .base import BaseLoader

+ 2 - 8
celery/loaders/base.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.loaders.base
-    ~~~~~~~~~~~~~~~~~~~
-
-    Loader base class.
-
-"""
+"""Loader base class."""
 from __future__ import absolute_import, unicode_literals
 
 import imp as _imp
@@ -29,6 +23,7 @@ from celery.utils.imports import (
 __all__ = ['BaseLoader']
 
 _RACE_PROTECTION = False
+
 CONFIG_INVALID_NAME = """\
 Error: Module '{module}' doesn't exist, or it's not a valid \
 Python module name.
@@ -58,7 +53,6 @@ class BaseLoader(object):
             See :meth:`on_worker_shutdown`.
 
         * What modules are imported to find tasks?
-
     """
     builtin_modules = frozenset()
     configured = False

+ 1 - 7
celery/loaders/default.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.loaders.default
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    The default loader used when no custom app has been initialized.
-
-"""
+"""The default loader used when no custom app has been initialized."""
 from __future__ import absolute_import, unicode_literals
 
 import os

+ 5 - 11
celery/local.py

@@ -1,14 +1,10 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.local
-    ~~~~~~~~~~~~
-
-    This module contains critical utilities that
-    needs to be loaded as soon as possible, and that
-    shall not load any third party modules.
+"""Proxy/PromiseProxy implementation.
 
-    Parts of this module is Copyright by Werkzeug Team.
+This module contains critical utilities that need to be loaded as
+soon as possible, and that shall not load any third-party modules.
 
+Parts of this module are Copyright by the Werkzeug Team.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -101,8 +97,7 @@ class Proxy(object):
     def _get_current_object(self):
         """Return the current object.  This is useful if you want the real
         object behind the proxy at a time for performance reasons or because
-        you want to pass the object into a different context.
-        """
+        you want to pass the object into a different context."""
         loc = object.__getattribute__(self, '_Proxy__local')
         if not hasattr(loc, '__release_local__'):
             return loc(*self.__args, **self.__kwargs)
@@ -307,7 +302,6 @@ class PromiseProxy(Proxy):
 
     :class:`Proxy` will evaluate the object each time, while the
     promise will only evaluate it once.
-
     """
 
     __slots__ = ('__pending__',)
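
The Proxy/PromiseProxy distinction noted in the docstring above can be seen
with a counter — a minimal sketch; the evaluate-per-access behavior follows
from ``_get_current_object`` calling the factory when no local is attached:

.. code-block:: python

    from celery.local import Proxy, PromiseProxy

    calls = [0]

    def factory():
        calls[0] += 1
        return {'answer': 42}

    p = Proxy(factory)          # re-evaluates on every access
    pp = PromiseProxy(factory)  # evaluates once, then memoizes

    p.keys(); p.keys()
    assert calls[0] == 2
    pp.keys(); pp.keys()
    assert calls[0] == 3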

+ 74 - 88
celery/platforms.py

@@ -1,12 +1,6 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.platforms
-    ~~~~~~~~~~~~~~~~
-
-    Utilities dealing with platform specifics: signals, daemonization,
-    users, groups, and so on.
-
-"""
+"""Utilities dealing with platform specifics: signals, daemonization,
+users, groups, and so on."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import atexit
@@ -41,14 +35,16 @@ pwd = try_import('pwd')
 grp = try_import('grp')
 mputil = try_import('multiprocessing.util')
 
-__all__ = ['EX_OK', 'EX_FAILURE', 'EX_UNAVAILABLE', 'EX_USAGE', 'SYSTEM',
-           'IS_macOS', 'IS_WINDOWS', 'pyimplementation', 'LockFailed',
-           'get_fdmax', 'Pidfile', 'create_pidlock',
-           'close_open_fds', 'DaemonContext', 'detached', 'parse_uid',
-           'parse_gid', 'setgroups', 'initgroups', 'setgid', 'setuid',
-           'maybe_drop_privileges', 'signals', 'set_process_title',
-           'set_mp_process_title', 'get_errno_name', 'ignore_errno',
-           'fd_by_path', 'isatty']
+__all__ = [
+    'EX_OK', 'EX_FAILURE', 'EX_UNAVAILABLE', 'EX_USAGE', 'SYSTEM',
+    'IS_macOS', 'IS_WINDOWS', 'pyimplementation', 'LockFailed',
+    'get_fdmax', 'Pidfile', 'create_pidlock',
+    'close_open_fds', 'DaemonContext', 'detached', 'parse_uid',
+    'parse_gid', 'setgroups', 'initgroups', 'setgid', 'setuid',
+    'maybe_drop_privileges', 'signals', 'set_process_title',
+    'set_mp_process_title', 'get_errno_name', 'ignore_errno',
+    'fd_by_path', 'isatty',
+]
 
 # exitcodes
 EX_OK = getattr(os, 'EX_OK', 0)
@@ -124,10 +120,11 @@ class Pidfile(object):
 
     This is the type returned by :func:`create_pidlock`.
 
-    TIP: Use the :func:`create_pidlock` function instead,
-    which is more convenient and also removes stale pidfiles (when
-    the process holding the lock is no longer running).
-
+    See Also:
+        Best practice is to not use this directly but rather use
+        the :func:`create_pidlock` function instead,
+        which is more convenient and also removes stale pidfiles (when
+        the process holding the lock is no longer running).
     """
 
     #: Path to the pid lock file.
@@ -234,14 +231,11 @@ def create_pidlock(pidfile):
     to release the lock at exit, you can skip this by calling
     :func:`_create_pidlock` instead.
 
-    :returns: :class:`Pidfile`.
-
-    **Example**:
-
-    .. code-block:: python
-
-        pidlock = create_pidlock('/var/run/app.pid')
+    Returns:
+        Pidfile: used to manage the lock.
 
+    Example:
+        >>> pidlock = create_pidlock('/var/run/app.pid')
     """
     pidlock = _create_pidlock(pidfile)
     atexit.register(pidlock.release)
@@ -263,16 +257,14 @@ def fd_by_path(paths):
     This method returns a list of file descriptors corresponding to
     the file paths passed in the ``paths`` argument.
 
-    :keyword paths: List of file paths.
+    Arguments:
+        paths (List[str]): List of file paths.
 
-    :returns: :list:.
+    Returns:
+        List[int]: List of file descriptors.
 
-    **Example**:
-
-    .. code-block:: python
-
-        keep = fd_by_path(['/dev/urandom',
-                           '/my/precious/'])
+    Example:
+        >>> keep = fd_by_path(['/dev/urandom', '/my/precious/'])
     """
     stats = set()
     for path in paths:
@@ -295,6 +287,7 @@ def fd_by_path(paths):
 
 
 class DaemonContext(object):
+
     _is_open = False
 
     def __init__(self, pidfile=None, workdir=None, umask=None,
@@ -360,38 +353,38 @@ def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
              workdir=None, fake=False, **opts):
     """Detach the current process in the background (daemonize).
 
-    :keyword logfile: Optional log file.  The ability to write to this file
-       will be verified before the process is detached.
-    :keyword pidfile: Optional pidfile.  The pidfile will not be created,
-      as this is the responsibility of the child.  But the process will
-      exit if the pid lock exists and the pid written is still running.
-    :keyword uid: Optional user id or user name to change
-      effective privileges to.
-    :keyword gid: Optional group id or group name to change effective
-      privileges to.
-    :keyword umask: Optional umask that will be effective in the child process.
-    :keyword workdir: Optional new working directory.
-    :keyword fake: Don't actually detach, intended for debugging purposes.
-    :keyword \*\*opts: Ignored.
-
-    **Example**:
-
-    .. code-block:: python
-
-        from celery.platforms import detached, create_pidlock
-
-        with detached(logfile='/var/log/app.log', pidfile='/var/run/app.pid',
-                      uid='nobody'):
-            # Now in detached child process with effective user set to nobody,
-            # and we know that our logfile can be written to, and that
-            # the pidfile is not locked.
-            pidlock = create_pidlock('/var/run/app.pid')
-
-            # Run the program
-            program.run(logfile='/var/log/app.log')
-
+    Arguments:
+        logfile (str): Optional log file.
+            The ability to write to this file
+            will be verified before the process is detached.
+        pidfile (str): Optional pid file.
+            The pidfile will not be created,
+            as this is the responsibility of the child.  But the process will
+            exit if the pid lock exists and the pid written is still running.
+        uid (int, str): Optional user id or user name to change
+            effective privileges to.
+        gid (int, str): Optional group id or group name to change
+            effective privileges to.
+        umask (str, int): Optional umask that will be effective in
+            the child process.
+        workdir (str): Optional new working directory.
+        fake (bool): Don't actually detach, intended for debugging purposes.
+        **opts (Any): Ignored.
+
+    Example:
+        >>> from celery.platforms import detached, create_pidlock
+        >>> with detached(
+        ...           logfile='/var/log/app.log',
+        ...           pidfile='/var/run/app.pid',
+        ...           uid='nobody'):
+        ...     # Now in detached child process with the effective user
+        ...     # set to nobody, and we know that our logfile can be
+        ...     # written to, and that the pidfile is not locked.
+        ...     pidlock = create_pidlock('/var/run/app.pid')
+        ...
+        ...     # Run the program
+        ...     program.run(logfile='/var/log/app.log')
     """
-
     if not resource:
         raise RuntimeError('This platform does not support detach.')
     workdir = os.getcwd() if workdir is None else workdir
@@ -415,9 +408,10 @@ def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0,
 def parse_uid(uid):
     """Parse user id.
 
-    uid can be an integer (uid) or a string (user name), if a user name
-    the uid is taken from the system user registry.
-
+    Arguments:
+        uid (str, int): Actual uid, or the username of a user.
+    Returns:
+        int: The actual uid.
     """
     try:
         return int(uid)
@@ -431,9 +425,10 @@ def parse_uid(uid):
 def parse_gid(gid):
     """Parse group id.
 
-    gid can be an integer (gid) or a string (group name), if a group name
-    the gid is taken from the system group registry.
-
+    Arguments:
+        gid (str, int): Actual gid, or the name of a group.
+    Returns:
+        int: The actual gid of the group.
     """
     try:
         return int(gid)
@@ -512,7 +507,6 @@ def maybe_drop_privileges(uid=None, gid=None):
     changed to the users primary group.
 
     If only GID is specified, only the group is changed.
-
     """
     if sys.platform == 'win32':
         return
@@ -563,10 +557,7 @@ class Signals(object):
     If the requested signal is not supported on the current platform,
     the operation will be ignored.
 
-    **Examples**:
-
-    .. code-block:: pycon
-
+    Example:
         >>> from celery.platforms import signals
 
         >>> from proj.handlers import my_handler
@@ -593,7 +584,6 @@ class Signals(object):
         >>> signals.update(INT=exit_handler,
         ...                TERM=exit_handler,
         ...                HUP=hup_handler)
-
     """
 
     ignored = _signal.SIG_IGN
@@ -639,18 +629,16 @@ class Signals(object):
     def reset(self, *signal_names):
         """Reset signals to the default signal handler.
 
-        Does nothing if the platform doesn't support signals,
+        Does nothing if the platform has no support for signals,
         or the specified signal in particular.
-
         """
         self.update((sig, self.default) for sig in signal_names)
 
     def ignore(self, *signal_names):
         """Ignore signal using :const:`SIG_IGN`.
 
-        Does nothing if the platform doesn't support signals,
+        Does nothing if the platform has no support for signals,
         or the specified signal in particular.
-
         """
         self.update((sig, self.ignored) for sig in signal_names)
 
@@ -660,9 +648,8 @@ class Signals(object):
     def __setitem__(self, signal_name, handler):
         """Install signal handler.
 
-        Does nothing if the current platform doesn't support signals,
+        Does nothing if the current platform has no support for signals,
         or the specified signal in particular.
-
         """
         try:
             _signal.signal(self.signum(signal_name), handler)
@@ -692,7 +679,6 @@ def set_process_title(progname, info=None):
     """Set the :command:`ps` name for the currently running process.
 
     Only works if :pypi:`setproctitle` is installed.
-
     """
     proctitle = '[{0}]'.format(progname)
     proctitle = '{0} {1}'.format(proctitle, info) if info else proctitle
@@ -712,7 +698,6 @@ else:
         process name.
 
         Only works if :pypi:`setproctitle` is installed.
-
         """
         if hostname:
             progname = '{0}: {1}'.format(progname, hostname)
@@ -741,8 +726,9 @@ def ignore_errno(*errnos, **kwargs):
         >>> with ignore_errno(errno.ENOENT, errno.EPERM):
         ...    pass
 
-    :keyword types: A tuple of exceptions to ignore (when the errno matches),
-                    defaults to :exc:`Exception`.
+    Arguments:
+        types (Tuple[Exception]): A tuple of exceptions to ignore
+            (when the errno matches).  Defaults to :exc:`Exception`.
     """
     types = kwargs.get('types') or (Exception,)
     errnos = [get_errno_name(errno) for errno in errnos]
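
To round off the converted ``parse_uid``/``parse_gid`` docstrings above, a
small sketch (that a ``'nobody'`` account exists is an assumption about the
host system):

.. code-block:: python

    from celery.platforms import parse_uid, parse_gid

    assert parse_uid('1000') == 1000   # numeric strings pass through
    assert parse_gid(0) == 0

    # Names are resolved through the system registry (pwd/grp);
    # raises KeyError if the user does not exist.
    uid = parse_uid('nobody')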

+ 111 - 127
celery/result.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.result
-    ~~~~~~~~~~~~~
-
-    Task results/state and groups of results.
-
-"""
+"""Task results/state and results for groups of tasks."""
 from __future__ import absolute_import, unicode_literals
 
 import time
@@ -72,9 +66,9 @@ class ResultBase(object):
 class AsyncResult(ResultBase):
     """Query task state.
 
-    :param id: see :attr:`id`.
-    :keyword backend: see :attr:`backend`.
-
+    Arguments:
+        id (str): See :attr:`id`.
+        backend (Backend): See :attr:`backend`.
     """
     app = None
 
@@ -124,15 +118,16 @@ class AsyncResult(ResultBase):
         Any worker receiving the task, or having reserved the
         task, *must* ignore it.
 
-        :keyword terminate: Also terminate the process currently working
-            on the task (if any).
-        :keyword signal: Name of signal to send to process if terminate.
-            Default is TERM.
-        :keyword wait: Wait for replies from workers.  Will wait for 1 second
-           by default or you can specify a custom ``timeout``.
-        :keyword timeout: Time in seconds to wait for replies if ``wait``
-                          enabled.
-
+        Arguments:
+            terminate (bool): Also terminate the process currently working
+                on the task (if any).
+            signal (str): Name of signal to send to process if terminate.
+                Default is TERM.
+            wait (bool): Wait for replies from workers.
+                The ``timeout`` argument specifies the number of
+                seconds to wait.
+                Disabled by default.
+            timeout (float): Time in seconds to wait for replies when
+                ``wait`` is enabled.
         """
         self.app.control.revoke(self.id, connection=connection,
                                 terminate=terminate, signal=signal,
@@ -144,30 +139,30 @@ class AsyncResult(ResultBase):
             PROPAGATE_STATES=states.PROPAGATE_STATES):
         """Wait until task is ready, and return its result.
 
-        .. warning::
-
+        Warning:
            Waiting for tasks within a task may lead to deadlocks.
            Please read :ref:`task-synchronous-subtasks`.
 
-        :keyword timeout: How long to wait, in seconds, before the
-                          operation times out.
-        :keyword propagate: Re-raise exception if the task failed.
-        :keyword interval: Time to wait (in seconds) before retrying to
-           retrieve the result.  Note that this does not have any effect
-           when using the RPC/redis result store backends, as they do not
-           use polling.
-        :keyword no_ack: Enable amqp no ack (automatically acknowledge
-            message).  If this is :const:`False` then the message will
-            **not be acked**.
-        :keyword follow_parents: Re-raise any exception raised by parent task.
-
-        :raises celery.exceptions.TimeoutError: if `timeout` is not
-            :const:`None` and the result does not arrive within `timeout`
-            seconds.
-
-        If the remote call raised an exception then that exception will
-        be re-raised.
-
+        Arguments:
+            timeout (float): How long to wait, in seconds, before the
+                operation times out.
+            propagate (bool): Re-raise exception if the task failed.
+            interval (float): Time to wait (in seconds) before retrying to
+                retrieve the result.  Note that this does not have any effect
+                when using the RPC/redis result store backends, as they do not
+                use polling.
+            no_ack (bool): Enable amqp no ack (automatically acknowledge
+                message).  If this is :const:`False` then the message will
+                **not be acked**.
+            follow_parents (bool): Re-raise any exception raised by
+                parent tasks.
+
+        Raises:
+            celery.exceptions.TimeoutError: if `timeout` is not
+                :const:`None` and the result does not arrive within
+                `timeout` seconds.
+            Exception: If the remote call raised an exception then that
+                exception will be re-raised in the caller process.
         """
         assert_will_not_block()
         _on_interval = promise()
@@ -228,12 +223,6 @@ class AsyncResult(ResultBase):
             def pow2(i):
                 return i ** 2
 
-        Note that the ``trail`` option must be enabled
-        so that the list of children is stored in ``result.children``.
-        This is the default but enabled explicitly for illustration.
-
-        Calling :meth:`collect` would return:
-
         .. code-block:: pycon
 
             >>> from celery.result import ResultBase
@@ -244,6 +233,14 @@ class AsyncResult(ResultBase):
             ...  if not isinstance(v, (ResultBase, tuple))]
             [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]
 
+        Note:
+            The ``Task.trail`` option must be enabled
+            so that the list of children is stored in ``result.children``.
+            This is the default but enabled explicitly for illustration.
+
+        Yields:
+            Tuple[AsyncResult, Any]: tuples containing the result instance
+            of the child task, and the return value of that task.
         """
         for _, R in self.iterdeps(intermediate=intermediate):
             yield R, R.get(**kwargs)
@@ -271,7 +268,6 @@ class AsyncResult(ResultBase):
 
         If the task is still running, pending, or is waiting
         for retry then :const:`False` is returned.
-
         """
         return self.state in self.backend.READY_STATES
 
@@ -422,7 +418,6 @@ class AsyncResult(ResultBase):
 
                 The task executed successfully. The :attr:`result` attribute
                 then contains the tasks return value.
-
         """
         return self._get_task_meta()['status']
     status = state  # XXX compat
@@ -442,8 +437,8 @@ class AsyncResult(ResultBase):
 class ResultSet(ResultBase):
     """Working with more than one result.
 
-    :param results: List of result instances.
-
+    Arguments:
+        results (Sequence[AsyncResult]): List of result instances.
     """
     _app = None
 
@@ -463,7 +458,6 @@ class ResultSet(ResultBase):
         """Add :class:`AsyncResult` as a new member of the set.
 
         Does nothing if the result is already a member.
-
         """
         if result not in self.results:
             self.results.append(result)
@@ -479,8 +473,8 @@ class ResultSet(ResultBase):
     def remove(self, result):
         """Remove result from the set; it must be a member.
 
-        :raises KeyError: if the result is not a member.
-
+        Raises:
+            KeyError: if the result is not a member.
         """
         if isinstance(result, string_t):
             result = self.app.AsyncResult(result)
@@ -490,11 +484,8 @@ class ResultSet(ResultBase):
             raise KeyError(result)
 
     def discard(self, result):
-        """Remove result from the set if it is a member.
-
-        If it is not a member, do nothing.
-
-        """
+        """Remove result from the set if it is a member,
+        or do nothing if it's not."""
         try:
             self.remove(result)
         except KeyError:
@@ -512,18 +503,18 @@ class ResultSet(ResultBase):
     def successful(self):
         """Was all of the tasks successful?
 
-        :returns: :const:`True` if all of the tasks finished
-            successfully (i.e. did not raise an exception).
-
+        Returns:
+            bool: true if all of the tasks finished
+                successfully (i.e. did not raise an exception).
         """
         return all(result.successful() for result in self.results)
 
     def failed(self):
         """Did any of the tasks fail?
 
-        :returns: :const:`True` if one of the tasks failed.
-            (i.e., raised an exception)
-
+        Returns:
+            bool: true if one of the tasks failed
+                (i.e., raised an exception).
         """
         return any(result.failed() for result in self.results)
 
@@ -534,26 +525,25 @@ class ResultSet(ResultBase):
     def waiting(self):
         """Are any of the tasks incomplete?
 
-        :returns: :const:`True` if one of the tasks are still
-            waiting for execution.
-
+        Returns:
+            bool: true if one of the tasks is still
+                waiting for execution.
         """
         return any(not result.ready() for result in self.results)
 
     def ready(self):
         """Did all of the tasks complete? (either by success of failure).
 
-        :returns: :const:`True` if all of the tasks has been
-            executed.
-
+        Returns:
+            bool: true if all of the tasks have been executed.
         """
         return all(result.ready() for result in self.results)
 
     def completed_count(self):
         """Task completion count.
 
-        :returns: the number of tasks completed.
-
+        Returns:
+            int: the number of tasks completed.
         """
         return sum(int(result.successful()) for result in self.results)
 
@@ -566,15 +556,16 @@ class ResultSet(ResultBase):
                wait=False, timeout=None):
         """Send revoke signal to all workers for all tasks in the set.
 
-        :keyword terminate: Also terminate the process currently working
-            on the task (if any).
-        :keyword signal: Name of signal to send to process if terminate.
-            Default is TERM.
-        :keyword wait: Wait for replies from worker.  Will wait for 1 second
-           by default or you can specify a custom ``timeout``.
-        :keyword timeout: Time in seconds to wait for replies if ``wait``
-                          enabled.
-
+        Arguments:
+            terminate (bool): Also terminate the process currently working
+                on the task (if any).
+            signal (str): Name of signal to send to process if terminate.
+                Default is TERM.
+            wait (bool): Wait for replies from worker.
+                The ``timeout`` argument specifies the number of seconds
+                to wait.  Disabled by default.
+            timeout (float): Time in seconds to wait for replies when
+                the ``wait`` argument is enabled.
         """
         self.app.control.revoke([r.id for r in self.results],
                                 connection=connection, timeout=timeout,
@@ -618,7 +609,6 @@ class ResultSet(ResultBase):
         This is here for API compatibility with :class:`AsyncResult`;
         in addition it uses :meth:`join_native` if available for the
         current result backend.
-
         """
         if self._cache is not None:
             return self._cache
@@ -632,50 +622,44 @@ class ResultSet(ResultBase):
              callback=None, no_ack=True, on_message=None, on_interval=None):
         """Gathers the results of all tasks as a list in order.
 
-        .. note::
-
+        Note:
             This can be an expensive operation for result store
             backends that must resort to polling (e.g. database).
 
             You should consider using :meth:`join_native` if your backend
             supports it.
 
-        .. warning::
-
+        Warning:
             Waiting for tasks within a task may lead to deadlocks.
             Please see :ref:`task-synchronous-subtasks`.
 
-        :keyword timeout: The number of seconds to wait for results before
-                          the operation times out.
-
-        :keyword propagate: If any of the tasks raises an exception, the
-                            exception will be re-raised.
-
-        :keyword interval: Time to wait (in seconds) before retrying to
-                           retrieve a result from the set.  Note that this
-                           does not have any effect when using the amqp
-                           result store backend, as it does not use polling.
-
-        :keyword callback: Optional callback to be called for every result
-                           received.  Must have signature ``(task_id, value)``
-                           No results will be returned by this function if
-                           a callback is specified.  The order of results
-                           is also arbitrary when a callback is used.
-                           To get access to the result object for a particular
-                           id you will have to generate an index first:
-                           ``index = {r.id: r for r in gres.results.values()}``
-                           Or you can create new result objects on the fly:
-                           ``result = app.AsyncResult(task_id)`` (both will
-                           take advantage of the backend cache anyway).
-
-        :keyword no_ack: Automatic message acknowledgment (Note that if this
-            is set to :const:`False` then the messages *will not be
-            acknowledged*).
-
-        :raises celery.exceptions.TimeoutError: if ``timeout`` is not
-            :const:`None` and the operation takes longer than ``timeout``
-            seconds.
-
+        Arguments:
+            timeout (float): The number of seconds to wait for results
+                before the operation times out.
+            propagate (bool): If any of the tasks raises an exception,
+                the exception will be re-raised when this flag is set.
+            interval (float): Time to wait (in seconds) before retrying to
+                retrieve a result from the set.  Note that this does not have
+                any effect when using the amqp result store backend,
+                as it does not use polling.
+            callback (Callable): Optional callback to be called for every
+                result received.  Must have signature ``(task_id, value)``.
+                No results will be returned by this function if a callback
+                is specified.  The order of results is also arbitrary when a
+                callback is used.  To get access to the result object for
+                a particular id you will have to generate an index first:
+                ``index = {r.id: r for r in gres.results.values()}``
+                Or you can create new result objects on the fly:
+                ``result = app.AsyncResult(task_id)`` (both will
+                take advantage of the backend cache anyway).
+            no_ack (bool): Automatic message acknowledgment (Note that if this
+                is set to :const:`False` then the messages
+                *will not be acknowledged*).
+
+        Raises:
+            celery.exceptions.TimeoutError: if ``timeout`` is not
+                :const:`None` and the operation takes longer than ``timeout``
+                seconds.
         """
         assert_will_not_block()
         time_start = monotonic()
@@ -716,7 +700,6 @@ class ResultSet(ResultBase):
 
         This is currently only supported by the amqp, Redis and cache
         result backends.
-
         """
         return self.backend.iter_native(
             self,
@@ -736,7 +719,6 @@ class ResultSet(ResultBase):
 
         This is currently only supported by the amqp, Redis and cache
         result backends.
-
         """
         assert_will_not_block()
         order_index = None if callback else {
@@ -813,9 +795,9 @@ class GroupResult(ResultSet):
     It enables inspection of the tasks state and return values as
     a single entity.
 
-    :param id: The id of the group.
-    :param results: List of result instances.
-
+    Arguments:
+        id (str): The id of the group.
+        results (Sequence[AsyncResult]): List of result instances.
     """
 
     #: The UUID of the group.
@@ -831,12 +813,10 @@ class GroupResult(ResultSet):
     def save(self, backend=None):
         """Save group-result for later retrieval using :meth:`restore`.
 
-        Example::
-
+        Example:
             >>> def save_and_restore(result):
             ...     result.save()
             ...     result = GroupResult.restore(result.id)
-
         """
         return (backend or self.app.backend).save_group(self.id, self)
 
@@ -934,8 +914,12 @@ class EagerResult(AsyncResult):
 
     @property
     def _cache(self):
-        return {'task_id': self.id, 'result': self._result, 'status':
-                self._state, 'traceback': self._traceback}
+        return {
+            'task_id': self.id,
+            'result': self._result,
+            'status': self._state,
+            'traceback': self._traceback,
+        }
 
     @property
     def result(self):
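
A hedged end-to-end sketch of the APIs whose docstrings changed above (the
broker/backend URLs and the ``add`` task are assumptions):

.. code-block:: python

    from celery import Celery
    from celery.result import ResultSet

    app = Celery('proj', broker='amqp://', backend='rpc://')

    @app.task
    def add(x, y):
        return x + y

    result = add.delay(2, 2)
    print(result.get(timeout=10))   # 4, or TimeoutError after 10 seconds
    print(result.successful())      # True once the task is in a ready state

    rs = ResultSet([add.delay(i, i) for i in range(4)])
    print(rs.join(timeout=10))      # [0, 2, 4, 6], ordered as submitted
    print(rs.completed_count())     # 4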

+ 34 - 35
celery/schedules.py

@@ -1,12 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.schedules
-    ~~~~~~~~~~~~~~~~
-
-    Schedules define the intervals at which periodic tasks
-    should run.
-
-"""
+"""Schedules define the intervals at which periodic tasks run."""
 from __future__ import absolute_import, unicode_literals
 
 import numbers
@@ -26,12 +19,13 @@ from .utils.timeutils import (
     timezone, maybe_make_aware, ffwd, localize
 )
 
-__all__ = ['ParseException', 'schedule', 'crontab', 'crontab_parser',
-           'maybe_schedule', 'solar']
+__all__ = [
+    'ParseException', 'schedule', 'crontab', 'crontab_parser',
+    'maybe_schedule', 'solar',
+]
 
 schedstate = namedtuple('schedstate', ('is_due', 'next'))
 
-
 CRON_PATTERN_INVALID = """\
 Invalid crontab pattern. Valid range is {min}-{max}. \
 '{value}' was found.\
@@ -72,13 +66,13 @@ class ParseException(Exception):
 class schedule(object):
     """Schedule for periodic task.
 
-    :param run_every: Interval in seconds (or a :class:`~datetime.timedelta`).
-    :keyword relative:  If set to True the run time will be rounded to the
-        resolution of the interval.
-    :keyword nowfun: Function returning the current date and time
-        (class:`~datetime.datetime`).
-    :keyword app: Celery app instance.
-
+    Arguments:
+        run_every (float, ~datetime.timedelta): Time interval.
+        relative (bool): If set to True the run time will be rounded to
+            the resolution of the interval.
+        nowfun (Callable): Function returning the current date and time
+            (:class:`~datetime.datetime`).
+        app (~@Celery): Celery app instance.
     """
     relative = False
 
@@ -124,7 +118,6 @@ class schedule(object):
             For the default scheduler the value is 5 minutes, but for e.g.
             the :pypi:`django-celery` database scheduler the value
             is 5 seconds.
-
         """
         last_run_at = self.maybe_make_aware(last_run_at)
         rem_delta = self.remaining_estimate(last_run_at)
@@ -224,7 +217,6 @@ class crontab_parser(object):
     The maximum possible expanded value returned is found by the formula:
 
         :math:`max_ + min_ - 1`
-
     """
     ParseException = ParseException
 
@@ -377,7 +369,6 @@ class crontab(schedule):
     execution events.  Or, ``day_of_week`` is 1 and ``day_of_month``
     is '1-7,15-21' means every first and third Monday of every month
     present in ``month_of_year``.
-
     """
 
     def __init__(self, minute='*', hour='*', day_of_week='*',
@@ -419,7 +410,6 @@ class crontab(schedule):
         the expansion of ``*`` and ranges for 1-based cronspecs, such as
         day of month or month of year.  The default is sufficient for minute,
         hour, and day of week.
-
         """
         if isinstance(cronspec, numbers.Integral):
             result = {cronspec}
@@ -446,7 +436,6 @@ class crontab(schedule):
 
         Only called when ``day_of_month`` and/or ``month_of_year``
         cronspec is specified to further limit scheduled task execution.
-
         """
         datedata = AttributeDict(year=last_run_at.year)
         days_of_month = sorted(self.day_of_month)
@@ -589,7 +578,6 @@ class crontab(schedule):
         where next time to run is in seconds.
 
         See :meth:`celery.schedules.schedule.is_due` for more information.
-
         """
         rem_delta = self.remaining_estimate(last_run_at)
         rem = max(rem_delta.total_seconds(), 0)
@@ -633,16 +621,28 @@ class solar(schedule):
     """A solar event can be used as the ``run_every`` value of a
     periodic task entry to schedule based on certain solar events.
 
-    :param event: Solar event that triggers this task. Available
-        values are: ``dawn_astronomical``, ``dawn_nautical``, ``dawn_civil``,
-        ``sunrise``, ``solar_noon``, ``sunset``, ``dusk_civil``,
-        ``dusk_nautical``, ``dusk_astronomical``.
-    :param lat: The latitude of the observer.
-    :param lon: The longitude of the observer.
-    :param nowfun: Function returning the current date and time
-        (class:`~datetime.datetime`).
-    :param app: Celery app instance.
-
+    Notes:
+
+        Available event values are:
+
+            - ``dawn_astronomical``
+            - ``dawn_nautical``
+            - ``dawn_civil``
+            - ``sunrise``
+            - ``solar_noon``
+            - ``sunset``
+            - ``dusk_civil``
+            - ``dusk_nautical``
+            - ``dusk_astronomical``
+
+    Arguments:
+        event (str): Solar event that triggers this task.
+            See note for available values.
+        lat (int): The latitude of the observer.
+        lon (int): The longitude of the observer.
+        nowfun (Callable): Function returning the current date and time
+            as a class:`~datetime.datetime`.
+        app (~@Celery): Celery app instance.
     """
 
     _all_events = {
@@ -756,7 +756,6 @@ class solar(schedule):
         where next time to run is in seconds.
 
         See :meth:`celery.schedules.schedule.is_due` for more information.
-
         """
         rem_delta = self.remaining_estimate(last_run_at)
         rem = max(rem_delta.total_seconds(), 0)
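
Putting ``crontab`` and ``solar`` together — a hedged sketch of a beat
schedule (the setting name and task names are assumptions, consistent with
the lowercase settings referenced elsewhere in this changeset):

.. code-block:: python

    from celery import Celery
    from celery.schedules import crontab, solar

    app = Celery('proj', broker='amqp://')

    app.conf.beat_schedule = {
        'nightly-cleanup': {
            'task': 'proj.tasks.cleanup',            # hypothetical task
            'schedule': crontab(minute=0, hour=4),   # every day at 04:00
        },
        'dusk-report': {
            'task': 'proj.tasks.report',             # hypothetical task
            'schedule': solar('sunset', -37.81, 144.96),
        },
    }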

+ 1 - 7
celery/security/__init__.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.security
-    ~~~~~~~~~~~~~~~
-
-    Module implementing the signing message serializer.
-
-"""
+"""Message Signing Serializer."""
 from __future__ import absolute_import, unicode_literals
 
 from kombu.serialization import (

+ 1 - 7
celery/security/certificate.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.security.certificate
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    X.509 certificates.
-
-"""
+"""X.509 certificates."""
 from __future__ import absolute_import, unicode_literals
 
 import glob

+ 1 - 7
celery/security/key.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.security.key
-    ~~~~~~~~~~~~~~~~~~~
-
-    Private key for the security serializer.
-
-"""
+"""Private keys for the security serializer."""
 from __future__ import absolute_import, unicode_literals
 
 from kombu.utils.encoding import ensure_bytes

+ 1 - 7
celery/security/serialization.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.security.serialization
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Secure serializer.
-
-"""
+"""Secure serializer."""
 from __future__ import absolute_import, unicode_literals
 
 import sys

+ 1 - 7
celery/security/utils.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.security.utils
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Utilities used by the message signing serializer.
-
-"""
+"""Utilities used by the message signing serializer."""
 from __future__ import absolute_import, unicode_literals
 
 import sys

+ 6 - 9
celery/signals.py

@@ -1,16 +1,13 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.signals
-    ~~~~~~~~~~~~~~
-
-    This module defines the signals (Observer pattern) sent by
-    both workers and clients.
+"""Celery Signals.
 
-    Functions can be connected to these signals, and connected
-    functions are called whenever a signal is called.
+This module defines the signals (Observer pattern) sent by
+both workers and clients.
 
-    See :ref:`signals` for more information.
+Functions can be connected to these signals, and connected
+functions are called whenever a signal is called.
 
+See :ref:`signals` for more information.
 """
 from __future__ import absolute_import, unicode_literals
 

+ 20 - 22
celery/states.py

@@ -1,9 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-celery.states
-=============
-
-Built-in task states.
+"""Built-in task states.
 
 .. _states:
 
@@ -52,29 +48,32 @@ ALL_STATES
 
 Set of all possible states.
 
-
 Misc.
 -----
 
 """
 from __future__ import absolute_import, unicode_literals
 
-__all__ = ['PENDING', 'RECEIVED', 'STARTED', 'SUCCESS', 'FAILURE',
-           'REVOKED', 'RETRY', 'IGNORED', 'READY_STATES', 'UNREADY_STATES',
-           'EXCEPTION_STATES', 'PROPAGATE_STATES', 'precedence', 'state']
+__all__ = [
+    'PENDING', 'RECEIVED', 'STARTED', 'SUCCESS', 'FAILURE',
+    'REVOKED', 'RETRY', 'IGNORED', 'READY_STATES', 'UNREADY_STATES',
+    'EXCEPTION_STATES', 'PROPAGATE_STATES', 'precedence', 'state',
+]
 
 #: State precedence.
 #: None represents the precedence of an unknown state.
 #: Lower index means higher precedence.
-PRECEDENCE = ['SUCCESS',
-              'FAILURE',
-              None,
-              'REVOKED',
-              'STARTED',
-              'RECEIVED',
-              'REJECTED',
-              'RETRY',
-              'PENDING']
+PRECEDENCE = [
+    'SUCCESS',
+    'FAILURE',
+    None,
+    'REVOKED',
+    'STARTED',
+    'RECEIVED',
+    'REJECTED',
+    'RETRY',
+    'PENDING',
+]
 
 #: Hash lookup of PRECEDENCE to index
 PRECEDENCE_LOOKUP = dict(zip(PRECEDENCE, range(0, len(PRECEDENCE))))
@@ -85,7 +84,6 @@ def precedence(state):
     """Get the precedence index for state.
 
     Lower index means higher precedence.
-
     """
     try:
         return PRECEDENCE_LOOKUP[state]
@@ -110,7 +108,6 @@ class state(str):
 
         >>> state('PROGRESS') > state('SUCCESS')
         False
-
     """
 
     def __gt__(self, other):
@@ -149,5 +146,6 @@ UNREADY_STATES = frozenset({PENDING, RECEIVED, STARTED, REJECTED, RETRY})
 EXCEPTION_STATES = frozenset({RETRY, FAILURE, REVOKED})
 PROPAGATE_STATES = frozenset({FAILURE, REVOKED})
 
-ALL_STATES = frozenset({PENDING, RECEIVED, STARTED,
-                        SUCCESS, FAILURE, RETRY, REVOKED})
+ALL_STATES = frozenset({
+    PENDING, RECEIVED, STARTED, SUCCESS, FAILURE, RETRY, REVOKED,
+})
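
The precedence table above drives state comparison; a quick illustration
(``'PROGRESS'`` is a hypothetical custom state, which falls back to the
precedence of ``None``):

.. code-block:: python

    from celery.states import state, precedence, SUCCESS, PENDING, FAILURE

    assert precedence(SUCCESS) == 0                  # lower index wins
    assert precedence(FAILURE) < precedence(PENDING)

    assert state(FAILURE) > state(PENDING)           # FAILURE takes precedence
    assert not (state('PROGRESS') > state(SUCCESS))  # matches the doctest above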

+ 5 - 8
celery/task/__init__.py

@@ -1,13 +1,10 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.task
-    ~~~~~~~~~~~
-
-    This is the old task module, it should not be used anymore,
-    import from the main 'celery' module instead.
-    If you're looking for the decorator implementation then that's in
-    ``celery.app.base.Celery.task``.
+"""Old deprecated task module.
 
+This is the old task module; it should not be used anymore.
+Import from the main 'celery' module instead.
+If you're looking for the decorator implementation then that's in
+``celery.app.base.Celery.task``.
 """
 from __future__ import absolute_import, unicode_literals
 

+ 4 - 11
celery/task/base.py

@@ -1,13 +1,10 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.task.base
-    ~~~~~~~~~~~~~~~~
-
-    The task implementation has been moved to :mod:`celery.app.task`.
+"""Deprecated task base class.
 
-    This contains the backward compatible Task class used in the old API,
-    and shouldn't be used in new applications.
+The task implementation has been moved to :mod:`celery.app.task`.
 
+This contains the backward compatible Task class used in the old API,
+and shouldn't be used in new applications.
 """
 from __future__ import absolute_import, unicode_literals
 
@@ -58,7 +55,6 @@ class TaskType(type):
 
     If no :attr:`Task.name` attribute is provided, then the name is generated
     from the module and class name.
-
     """
     _creation_count = {}  # used by old non-abstract task classes
 
@@ -133,7 +129,6 @@ class Task(BaseTask):
     """Deprecated Task base class.
 
     Modern applications should use :class:`celery.Task` instead.
-
     """
     abstract = True
     __bound__ = False
@@ -219,7 +214,6 @@ class Task(BaseTask):
 
                 with app.producer_or_acquire() as prod:
                     my_task.apply_async(producer=prod)
-
         """
         exchange = self.exchange if exchange is None else exchange
         if exchange_type is None:
@@ -237,7 +231,6 @@ class Task(BaseTask):
         this task is sent to.
 
         Should be replaced with :class:`@amqp.TaskConsumer` instead.
-
         """
         Q = self._get_app().amqp
         connection = connection or self.establish_connection()
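
A hedged sketch of the replacement the docstring above points to, using the app's ``TaskConsumer`` rather than the deprecated ``Task.get_consumer()`` (``app`` and the ``connection_for_read()`` helper are assumptions based on the Celery 4.0 app API)::

    with app.connection_for_read() as conn:
        with app.amqp.TaskConsumer(conn) as consumer:
            ...  # consume task messages from this app's queues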

+ 0 - 1
celery/tests/security/__init__.py

@@ -2,7 +2,6 @@
 Keys and certificates for tests (KEY1 is a private key of CERT1, etc.)
 
 Generated with `extra/security/get-cert.sh`
-
 """
 from __future__ import absolute_import, unicode_literals
 

+ 1 - 3
celery/tests/security/test_security.py

@@ -1,5 +1,4 @@
-"""
-Keys and certificates for tests (KEY1 is a private key of CERT1, etc.)
+"""Keys and certificates for tests (KEY1 is a private key of CERT1, etc.)
 
 Generated with:
 
@@ -12,7 +11,6 @@ Generated with:
     $ openssl x509 -req -days 365 -in cert1.csr \
               -signkey key1.key -out cert1.crt
     $ rm key1.key.org cert1.csr
-
 """
 from __future__ import absolute_import, unicode_literals
 

+ 3 - 8
celery/utils/__init__.py

@@ -1,13 +1,8 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.utils
-    ~~~~~~~~~~~~
-
-    Utility functions.
-
-    Do not import from here directly anymore, these are only
-    here for backwards compatibility.
+"""Utility functions.
 
+Do not import from here directly anymore, as these are only
+here for backwards compatibility.
 """
 from __future__ import absolute_import, print_function, unicode_literals
 

+ 1 - 7
celery/utils/abstract.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.utils.abstract
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Abstract classes.
-
-"""
+"""Abstract classes."""
 from __future__ import absolute_import, unicode_literals
 
 from abc import ABCMeta, abstractmethod, abstractproperty

+ 26 - 41
celery/utils/collections.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.utils.collections``
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Custom maps, sets, sequences and other data structures.
-
-"""
+"""Custom maps, sets, sequences and other data structures."""
 from __future__ import absolute_import, unicode_literals
 
 import sys
@@ -59,7 +53,8 @@ def force_mapping(m):
 def lpmerge(L, R):
     """In place left precedent dictionary merge.
 
-    Keeps values from `L`, if the value in `R` is :const:`None`."""
+    Keeps values from `L` if the value in `R` is :const:`None`.
+    """
     setitem = L.__setitem__
     [setitem(k, v) for k, v in items(R) if v is not None]
     return L
@@ -117,7 +112,6 @@ class AttributeDictMixin(object):
     """Augment classes with a Mapping interface by adding attribute access.
 
     I.e. `d.key -> d[key]`.
-
     """
 
     def __getattr__(self, k):
@@ -144,7 +138,6 @@ class DictAttribute(object):
 
     `obj[k] -> obj.k`
     `obj[k] = val -> obj.k = val`
-
     """
     obj = None
 
@@ -353,10 +346,10 @@ class ConfigurationView(ChainMap, AttributeDictMixin):
     If the key does not exist in ``changes``, the ``defaults``
     dictionaries are consulted.
 
-    :param changes:  Dict containing changes to the configuration.
-    :param defaults: List of dictionaries containing the default
-                     configuration.
-
+    Arguments:
+        changes (Mapping): Map of configuration changes.
+        defaults (List[Mapping]): List of dictionaries containing
+            the default configuration.
     """
 
     def __init__(self, changes, defaults=None, key_t=None, prefix=None):
@@ -438,33 +431,28 @@ class LimitedSet(object):
 
     All arguments are optional, and no limits are enabled by default.
 
-    :keyword maxlen: Optional max number of items.
-
-        Adding more items than ``maxlen`` will result in immediate
-        removal of items sorted by oldest insertion time.
+    Arguments:
+        maxlen (int): Optional max number of items.
+            Adding more items than ``maxlen`` will result in immediate
+            removal of items sorted by oldest insertion time.
 
-    :keyword expires: TTL for all items.
+        expires (float): TTL for all items.
+            Expired items are purged as keys are inserted.
 
-        Expired items are purged as keys are inserted.
+        minlen (int): Minimal residual size of this set.
+
+            .. versionadded:: 4.0
 
-    :keyword minlen: Minimal residual size of this set.
+            Value must be less than ``maxlen`` if both are configured.
 
-        .. versionadded:: 4.0
+            Older expired items will be deleted, only after the set
+            exceeds ``minlen`` number of items.
 
-        Value must be less than ``maxlen`` if both are configured.
-
-        Older expired items will be deleted, only after the set
-        exceeds ``minlen`` number of items.
-
-    :keyword data: Initial data to initialize set with.
-        Can be an iterable of ``(key, value)`` pairs,
-        a dict (``{key: insertion_time}``), or another instance
-        of :class:`LimitedSet`.
+        data (Sequence): Initial data to initialize set with.
+            Can be an iterable of ``(key, value)`` pairs,
+            a dict (``{key: insertion_time}``), or another instance
+            of :class:`LimitedSet`.
 
     Example:
-
-    .. code-block:: pycon
-
         >>> s = LimitedSet(maxlen=50000, expires=3600, minlen=4000)
         >>> for i in range(60000):
         ...     s.add(i)
@@ -481,7 +469,6 @@ class LimitedSet(object):
         4000
         >>> 57000 in s  # even this item is gone now
         False
-
     """
 
     max_heap_percent_overload = 15
@@ -567,9 +554,9 @@ class LimitedSet(object):
     def purge(self, now=None):
         """Check oldest items and remove them if needed.
 
-        :keyword now: Time of purging -- by default right now.
-                      This can be useful for unit testing.
-
+        Arguments:
+            now (float): Time of purging -- by default right now.
+                This can be useful for unit testing.
         """
         now = now or time.time()
         now = now() if isinstance(now, Callable) else now
@@ -599,8 +586,7 @@ class LimitedSet(object):
     def as_dict(self):
         """Whole set as serializable dictionary.
 
-        Example::
-
+        Example:
             >>> s = LimitedSet(maxlen=200)
             >>> r = LimitedSet(maxlen=200)
             >>> for i in range(500):
@@ -609,7 +595,6 @@ class LimitedSet(object):
             >>> r.update(s.as_dict())
             >>> r == s
             True
-
         """
         return {key: inserted for inserted, key in values(self._data)}
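
To make the ``lpmerge()`` contract above concrete, a small sketch (values from ``L`` survive whenever the corresponding value in ``R`` is :const:`None`)::

    from celery.utils.collections import lpmerge

    L = {'a': 1, 'b': 2}
    R = {'a': None, 'b': 3, 'c': 4}
    assert lpmerge(L, R) == {'a': 1, 'b': 3, 'c': 4}  # merged in place into L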
 

+ 4 - 12
celery/utils/debug.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.utils.debug
-    ~~~~~~~~~~~~~~~~~~
-
-    Utilities for debugging memory usage.
-
-"""
+"""Utilities for debugging memory usage, blocking calls, etc."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
@@ -74,7 +68,6 @@ def sample_mem():
     """Sample RSS memory usage.
 
     Statistics can then be output by calling :func:`memdump`.
-
     """
     current_rss = mem_rss()
     _mem_sample.append(current_rss)
@@ -97,7 +90,6 @@ def memdump(samples=10, file=None):  # pragma: no cover
     Will print a sample of all RSS memory samples added by
     calling :func:`sample_mem`, and in addition print
     used RSS memory after :func:`gc.collect`.
-
     """
     say = partial(print, file=file)
     if ps() is None:
@@ -118,7 +110,6 @@ def sample(x, n, k=0):
     item is returned.
 
     ``k`` can be used as offset.
-
     """
     j = len(x) // n
     for _ in range(n):
@@ -132,8 +123,9 @@ def sample(x, n, k=0):
 def hfloat(f, p=5):
     """Convert float to value suitable for humans.
 
-    :keyword p: Float precision.
-
+    Arguments:
+        f (float): The floating point number.
+        p (int): Floating point precision (default is 5).
     """
     i = int(f)
     return i if i == f else '{0:.{p}}'.format(f, p=p)
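
Worked examples of ``hfloat()`` as implemented above: integral floats collapse to :class:`int`, everything else is formatted to ``p`` significant digits::

    >>> from celery.utils.debug import hfloat
    >>> hfloat(3.0)
    3
    >>> hfloat(3.14159265)
    '3.1416'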

+ 8 - 14
celery/utils/deprecated.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.utils.deprecated
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Deprecation utilities.
-
-"""
+"""Deprecation utilities."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import warnings
@@ -47,13 +41,13 @@ def Callable(deprecation=None, removal=None,
 
     A deprecation warning will be emitted when the function is called.
 
-    :keyword deprecation: Version that marks first deprecation, if this
-      argument is not set a ``PendingDeprecationWarning`` will be emitted
-      instead.
-    :keyword removal:  Future version when this feature will be removed.
-    :keyword alternative:  Instructions for an alternative solution (if any).
-    :keyword description: Description of what is being deprecated.
-
+    Arguments:
+        deprecation (str): Version that marks first deprecation, if this
+            argument is not set a ``PendingDeprecationWarning`` will be
+            emitted instead.
+        removal (str): Future version when this feature will be removed.
+        alternative (str): Instructions for an alternative solution (if any).
+        description (str): Description of what is being deprecated.
     """
     def _inner(fun):
 

+ 83 - 87
celery/utils/dispatch/saferef.py

@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-"Safe weakrefs", originally from :pypi:`pyDispatcher`.
+"""Safe weakrefs, originally from :pypi:`pyDispatcher`.
 
 Provides a way to safely weakref any function, including bound methods (which
 aren't handled by the core weakref module).
@@ -21,14 +20,15 @@ PY3 = sys.version_info[0] == 3
 def safe_ref(target, on_delete=None):  # pragma: no cover
     """Return a *safe* weak reference to a callable target
 
-    :param target: the object to be weakly referenced, if it's a
-        bound method reference, will create a :class:`BoundMethodWeakref`,
-        otherwise creates a simple :class:`weakref.ref`.
+    Arguments:
+        target (Any): The object to be weakly referenced, if it's a
+            bound method reference, will create a :class:`BoundMethodWeakref`,
+            otherwise creates a simple :class:`weakref.ref`.
 
-    :keyword on_delete: if provided, will have a hard reference stored
-        to the callable to be called after the safe reference
-        goes out of scope with the reference object, (either a
-        :class:`weakref.ref` or a :class:`BoundMethodWeakref`) as argument.
+        on_delete (Callable): If provided, will have a hard reference stored
+            to the callable to be called after the safe reference
+            goes out of scope with the reference object, (either a
+            :class:`weakref.ref` or a :class:`BoundMethodWeakref`) as argument.
     """
     if getattr(target, '__self__', None) is not None:
         # Turn a bound method into a BoundMethodWeakref instance.
@@ -55,39 +55,31 @@ class BoundMethodWeakref(object):  # pragma: no cover
     object keeps weak references to both the object and the
     function which together define the instance method.
 
-    .. attribute:: key
-
-        the identity key for the reference, calculated
-        by the class's :meth:`calculate_key` method applied to the
-        target instance method
-
-    .. attribute:: deletion_methods
-
-        sequence of callable objects taking
-        single argument, a reference to this object which
-        will be called when *either* the target object or
-        target function is garbage collected (i.e. when
-        this object becomes invalid).  These are specified
-        as the on_delete parameters of :func:`safe_ref` calls.
-
-    .. attribute:: weak_self
-
-        weak reference to the target object
+    Attributes:
 
-    .. attribute:: weak_fun
+        key (str): the identity key for the reference, calculated
+            by the class's :meth:`calculate_key` method applied to the
+            target instance method.
 
-        weak reference to the target function
+        deletion_methods (Sequence[Callable]): Callables taking
+            single argument, a reference to this object which
+            will be called when *either* the target object or
+            target function is garbage collected (i.e. when
+            this object becomes invalid).  These are specified
+            as the on_delete parameters of :func:`safe_ref` calls.
 
-    .. attribute:: _all_instances
+        weak_self (weakref.ref): weak reference to the target object.
 
-        class attribute pointing to all live
-        BoundMethodWeakref objects indexed by the class's
-        `calculate_key(target)` method applied to the target
-        objects. This weak value dictionary is used to
-        short-circuit creation so that multiple references
-        to the same (object, function) pair produce the
-        same BoundMethodWeakref instance.
+        weak_fun (weakref.ref): weak reference to the target function
 
+        _all_instances (weakref.WeakValueDictionary):
+            class attribute pointing to all live
+            BoundMethodWeakref objects indexed by the class's
+            `calculate_key(target)` method applied to the target
+            objects. This weak value dictionary is used to
+            short-circuit creation so that multiple references
+            to the same (object, function) pair produce the
+            same BoundMethodWeakref instance.
     """
 
     _all_instances = weakref.WeakValueDictionary()
@@ -95,15 +87,15 @@ class BoundMethodWeakref(object):  # pragma: no cover
     def __new__(cls, target, on_delete=None, *arguments, **named):
         """Create new instance or return current instance
 
-        Basically this method of construction allows us to
-        short-circuit creation of references to already-
-        referenced instance methods.  The key corresponding
-        to the target is calculated, and if there is already
-        an existing reference, that is returned, with its
-        deletionMethods attribute updated.  Otherwise the
-        new instance is created and registered in the table
-        of already-referenced methods.
-
+        Note:
+            Basically this method of construction allows us to
+            short-circuit creation of references to already-
+            referenced instance methods.  The key corresponding
+            to the target is calculated, and if there is already
+            an existing reference, that is returned, with its
+            deletionMethods attribute updated.  Otherwise the
+            new instance is created and registered in the table
+            of already-referenced methods.
         """
         key = cls.calculate_key(target)
         current = cls._all_instances.get(key)
@@ -117,22 +109,22 @@ class BoundMethodWeakref(object):  # pragma: no cover
             return base
 
     def __init__(self, target, on_delete=None):
-        """Return a weak-reference-like instance for a bound method
-
-        :param target: the instance-method target for the weak
-            reference, must have `__self__` and `__func__` attributes
-            and be reconstructable via::
+        """Return a weak-reference-like instance for a bound method.
 
-                target.__func__.__get__(target.__self__)
+        Arguments:
+            target (Any): The instance-method target for the weak
+                reference, must have `__self__` and `__func__` attributes
+                and be reconstructable via::
 
-            which is true of built-in instance methods.
+                    target.__func__.__get__(target.__self__)
 
-        :keyword on_delete: optional callback which will be called
-            when this weak reference ceases to be valid
-            (i.e. either the object or the function is garbage
-            collected).  Should take a single argument,
-            which will be passed a pointer to this object.
+                which is true of built-in instance methods.
 
+            on_delete (Callable): Optional callback which will be called
+                when this weak reference ceases to be valid
+                (i.e. either the object or the function is garbage
+                collected).  Should take a single argument,
+                which will be passed a pointer to this object.
         """
         def remove(weak, self=self):
             """Set self.is_dead to true when method or instance is destroyed"""
@@ -163,8 +155,10 @@ class BoundMethodWeakref(object):  # pragma: no cover
     def calculate_key(cls, target):
         """Calculate the reference key for this reference
 
-        Currently this is a two-tuple of the `id()`'s of the
-        target object and the target function respectively.
+        Returns:
+            Tuple[int, int]: Currently this is a two-tuple of
+                the `id()`'s of the target object and the target
+                function respectively.
         """
         return id(target.__self__), id(target.__func__)
     calculate_key = classmethod(calculate_key)
@@ -213,40 +207,42 @@ class BoundNonDescriptorMethodWeakref(BoundMethodWeakref):  # pragma: no cover
     """A specialized :class:`BoundMethodWeakref`, for platforms where
     instance methods are not descriptors.
 
-    It assumes that the function name and the target attribute name are the
-    same, instead of assuming that the function is a descriptor. This approach
-    is equally fast, but not 100% reliable because functions can be stored on
-    an attribute named differenty than the function's name such as in::
+    Warning:
 
-        >>> class A(object):
-        ...     pass
+        It assumes that the function name and the target attribute name are
+        the same, instead of assuming that the function is a descriptor.
+        This approach is equally fast, but not 100% reliable because
+        functions can be stored on an attribute named differently than the
+        function's name such as in::
 
-        >>> def foo(self):
-        ...     return 'foo'
-        >>> A.bar = foo
+            >>> class A(object):
+            ...     pass
 
-    But this shouldn't be a common use case. So, on platforms where methods
-    aren't descriptors (such as Jython) this implementation has the advantage
-    of working in the most cases.
+            >>> def foo(self):
+            ...     return 'foo'
+            >>> A.bar = foo
 
+        But this shouldn't be a common use case. So, on platforms where methods
+        aren't descriptors (such as Jython) this implementation has the
+        advantage of working in most cases.
     """
     def __init__(self, target, on_delete=None):
-        """Return a weak-reference-like instance for a bound method
+        """Return a weak-reference-like instance for a bound method.
 
-        :param target: the instance-method target for the weak
-            reference, must have `__self__` and `__func__` attributes
-            and be reconstructable via::
+        Arguments:
+            target (Any): the instance-method target for the weak
+                reference, must have `__self__` and `__func__` attributes
+                and be reconstructable via::
 
-                target.__func__.__get__(target.__self__)
+                    target.__func__.__get__(target.__self__)
 
-            which is true of built-in instance methods.
-
-        :keyword on_delete: optional callback which will be called
-            when this weak reference ceases to be valid
-            (i.e. either the object or the function is garbage
-            collected). Should take a single argument,
-            which will be passed a pointer to this object.
+                which is true of built-in instance methods.
 
+            on_delete (Callable): Optional callback which will be called
+                when this weak reference ceases to be valid
+                (i.e. either the object or the function is garbage
+                collected). Should take a single argument,
+                which will be passed a pointer to this object.
         """
         assert getattr(target.__self__, target.__name__) == target
         super(BoundNonDescriptorMethodWeakref, self).__init__(target,
@@ -260,9 +256,9 @@ class BoundNonDescriptorMethodWeakref(BoundMethodWeakref):  # pragma: no cover
         method for our object and function.
 
         Note:
             You may call this method any number of times,
             as it does not invalidate the reference.
-
         """
         target = self.weak_self()
         if target is not None:
@@ -285,5 +281,5 @@ def get_bound_method_weakref(target, on_delete):  # pragma: no cover
         return BoundMethodWeakref(target=target, on_delete=on_delete)
     else:
         # no luck, use the alternative implementation:
-        return BoundNonDescriptorMethodWeakref(target=target,
-                                               on_delete=on_delete)
+        return BoundNonDescriptorMethodWeakref(
+            target=target, on_delete=on_delete)
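
A minimal sketch of ``safe_ref()`` with a bound method, per the docstrings above (the class and callback names are illustrative)::

    from celery.utils.dispatch.saferef import safe_ref

    class Worker(object):
        def ping(self):
            return 'pong'

    def on_delete(ref):
        pass  # called once the target has been garbage collected

    w = Worker()
    ref = safe_ref(w.ping, on_delete=on_delete)  # -> BoundMethodWeakref
    assert ref()() == 'pong'  # dereference, then call the bound method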

+ 37 - 38
celery/utils/dispatch/signal.py

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-"""Signal class."""
+"""Implementation of the Observer pattern."""
 from __future__ import absolute_import, unicode_literals
 
 import weakref
@@ -32,9 +32,9 @@ def _make_id(target):  # pragma: no cover
 class Signal(object):  # pragma: no cover
     """Observer pattern implementation.
 
-    :param providing_args: A list of the arguments this signal can pass
-        along in a :meth:`send` call.
-
+    Arguments:
+        providing_args (List): A list of the arguments this signal can pass
+            along in a :meth:`send` call.
     """
 
     #: Holds a dictionary of
@@ -55,32 +55,32 @@ class Signal(object):  # pragma: no cover
     def connect(self, *args, **kwargs):
         """Connect receiver to sender for signal.
 
-        :param receiver: A function or an instance method which is to
-            receive signals. Receivers must be hashable objects.
-
-            if weak is :const:`True`, then receiver must be weak-referenceable
-            (more precisely :func:`saferef.safe_ref()` must be able to create a
-            reference to the receiver).
+        Arguments:
+            receiver (Callable): A function or an instance method which is to
+                receive signals. Receivers must be hashable objects.
 
-            Receivers must be able to accept keyword arguments.
+                if weak is :const:`True`, then receiver must be
+                weak-referenceable (more precisely :func:`saferef.safe_ref()`
+                must be able to create a reference to the receiver).
 
-            If receivers have a `dispatch_uid` attribute, the receiver will
-            not be added if another receiver already exists with that
-            `dispatch_uid`.
+                Receivers must be able to accept keyword arguments.
 
-        :keyword sender: The sender to which the receiver should respond.
-            Must either be of type :class:`Signal`, or :const:`None` to receive
-            events from any sender.
+                If receivers have a `dispatch_uid` attribute, the receiver will
+                not be added if another receiver already exists with that
+                `dispatch_uid`.
 
-        :keyword weak: Whether to use weak references to the receiver.
-            By default, the module will attempt to use weak references to the
-            receiver objects. If this parameter is false, then strong
-            references will be used.
+            sender (Any): The sender to which the receiver should respond.
+                Must either be of type :class:`Signal`, or :const:`None` to
+                receive events from any sender.
 
-        :keyword dispatch_uid: An identifier used to uniquely identify a
-            particular instance of a receiver. This will usually be a
-            string, though it may be anything hashable.
+            weak (bool): Whether to use weak references to the receiver.
+                By default, the module will attempt to use weak references to
+                the receiver objects. If this parameter is false, then strong
+                references will be used.
 
+            dispatch_uid (Hashable): An identifier used to uniquely identify a
+                particular instance of a receiver. This will usually be a
+                string, though it may be anything hashable.
         """
         def _handle_options(sender=None, weak=True, dispatch_uid=None):
 
@@ -124,16 +124,16 @@ class Signal(object):  # pragma: no cover
         If weak references are used, disconnect need not be called. The
         receiver will be removed from dispatch automatically.
 
-        :keyword receiver: The registered receiver to disconnect. May be
-            none if `dispatch_uid` is specified.
-
-        :keyword sender: The registered sender to disconnect.
+        Arguments:
+            receiver (Callable): The registered receiver to disconnect. May be
+                none if `dispatch_uid` is specified.
 
-        :keyword weak: The weakref state to disconnect.
+            sender (Any): The registered sender to disconnect.
 
-        :keyword dispatch_uid: the unique identifier of the receiver
-            to disconnect
+            weak (bool): The weakref state to disconnect.
 
+            dispatch_uid (Hashable): The unique identifier of the receiver
+                to disconnect.
         """
         if dispatch_uid:
             lookup_key = (dispatch_uid, _make_id(sender))
@@ -153,13 +153,13 @@ class Signal(object):  # pragma: no cover
         send, terminating the dispatch loop, so it is quite possible to not
         have all receivers called if a raises an error.
 
-        :param sender: The sender of the signal. Either a specific
-            object or :const:`None`.
-
-        :keyword \*\*named: Named arguments which will be passed to receivers.
-
-        :returns: a list of tuple pairs: `[(receiver, response), … ]`.
+        Arguments:
+            sender (Any): The sender of the signal. Either a specific
+                object or :const:`None`.
+            **named (Any): Named arguments which will be passed to receivers.
 
+        Returns:
+            List: of tuple pairs: `[(receiver, response), … ]`.
         """
         responses = []
         if not self.receivers:
@@ -180,7 +180,6 @@ class Signal(object):  # pragma: no cover
 
         This checks for weak references and resolves them, returning only
         live receivers.
-
         """
         none_senderkey = _make_id(None)
         receivers = []
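
Putting the ``connect``/``send`` API documented above together, a small sketch (the signal and receiver are invented for illustration)::

    from celery.utils.dispatch import Signal

    task_done = Signal(providing_args=['result'])
    seen = []

    def on_done(sender=None, result=None, **kwargs):
        seen.append((sender, result))

    task_done.connect(on_done, weak=False)
    responses = task_done.send(sender='worker1', result=42)
    assert seen == [('worker1', 42)]
    # responses is the documented [(receiver, response), ...] list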

+ 1 - 7
celery/utils/encoding.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.utils.encoding
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    This module has moved to :mod:`kombu.utils.encoding`.
-
-"""
+"""**DEPRECATED**: This module has moved to :mod:`kombu.utils.encoding`."""
 from __future__ import absolute_import, unicode_literals
 
 from kombu.utils.encoding import (  # noqa

+ 7 - 25
celery/utils/functional.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.utils.functional
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Utilities for functions.
-
-"""
+"""Functional-style utilties."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import sys
@@ -50,12 +44,9 @@ class mlazy(lazy):
 
     The function is only evaluated once, every subsequent access
     will return the same value.
-
-    .. attribute:: evaluated
-
-        Set to to :const:`True` after the object has been evaluated.
-
     """
+
+    #: Set to :const:`True` after the object has been evaluated.
     evaluated = False
     _value = None
 
@@ -70,12 +61,13 @@ def noop(*args, **kwargs):
     """No operation.
 
     Takes any arguments/keyword arguments and does nothing.
-
     """
     pass
 
 
 def pass1(arg, *args, **kwargs):
+    """Take any number of arguments/keyword arguments and return
+    the first positional argument."""
     return arg
 
 
@@ -92,7 +84,6 @@ def first(predicate, it):
 
     If ``predicate`` is None it will return the first item that is not
     :const:`None`.
-
     """
     return next(
         (v for v in evaluate_promises(it) if (
@@ -107,7 +98,6 @@ def firstmethod(method, on_call=None):
 
     The list can also contain lazy instances
     (:class:`~kombu.utils.functional.lazy`.)
-
     """
 
     def _matcher(it, *args, **kwargs):
@@ -127,10 +117,7 @@ def firstmethod(method, on_call=None):
 def chunks(it, n):
     """Split an iterator into chunks with `n` elements each.
 
-    Examples:
-
-    .. code-block:: pycon
-
+    Example:
         # n == 2
         >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 2)
         >>> list(x)
@@ -140,7 +127,6 @@ def chunks(it, n):
         >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3)
         >>> list(x)
         [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]]
-
     """
     for first in it:
         yield [first] + list(islice(it, n - 1))
@@ -149,10 +135,7 @@ def chunks(it, n):
 def padlist(container, size, default=None):
     """Pad list with default elements.
 
-    Examples:
-
-    .. code-block:: pycon
-
+    Example:
         >>> first, last, city = padlist(['George', 'Costanza', 'NYC'], 3)
         ('George', 'Costanza', 'NYC')
         >>> first, last, city = padlist(['George', 'Costanza'], 3)
@@ -161,7 +144,6 @@ def padlist(container, size, default=None):
         ...     ['George', 'Costanza', 'NYC'], 4, default='Earth',
         ... )
         ('George', 'Costanza', 'NYC', 'Earth')
-
     """
     return list(container)[:size] + [default] * (size - len(container))
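
Alongside the doctests above, a quick sketch of ``first()`` and ``mlazy`` per their documented semantics::

    from celery.utils.functional import first, mlazy

    assert first(lambda x: x > 2, [1, 2, 3, 4]) == 3
    assert first(None, [None, None, 7]) == 7  # None predicate: first non-None

    p = mlazy(lambda: 42)
    assert p() == 42 and p.evaluated  # evaluated once, then cached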
 

+ 10 - 18
celery/utils/graph.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    ``celery.utils.graph``
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Dependency graph implementation.
-
-"""
+"""Dependency graph implementation."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 from collections import Counter
@@ -45,10 +39,8 @@ class DependencyGraph(object):
     Takes an optional iterator of ``(obj, dependencies)``
     tuples to build the graph from.
 
-    .. warning::
-
+    Warning:
         Does not support cycle detection.
-
     """
 
     def __init__(self, it=None, formatter=None):
@@ -73,9 +65,8 @@ class DependencyGraph(object):
     def topsort(self):
         """Sort the graph topologically.
 
-        :returns: a list of objects in the order
-            in which they must be handled.
-
+        Returns:
+            List: of objects in the order in which they must be handled.
         """
         graph = DependencyGraph()
         components = self._tarjan72()
@@ -121,7 +112,6 @@ class DependencyGraph(object):
         """Khans simple topological sort algorithm from '62
 
         See https://en.wikipedia.org/wiki/Topological_sorting
-
         """
         count = Counter()
         result = []
@@ -145,8 +135,8 @@ class DependencyGraph(object):
     def _tarjan72(self):
         """Tarjan's algorithm to find strongly connected components.
 
-        See http://bit.ly/vIMv3h.
-
+        See Also:
+            http://bit.ly/vIMv3h.
         """
         result, stack, low = [], [], {}
 
@@ -177,8 +167,10 @@ class DependencyGraph(object):
     def to_dot(self, fh, formatter=None):
         """Convert the graph to DOT format.
 
-        :param fh: A file, or a file-like object to write the graph to.
-
+        Arguments:
+            fh (IO): A file, or a file-like object to write the graph to.
+            formatter (celery.utils.graph.GraphFormatter): Custom graph
+                formatter to use.
         """
         seen = set()
         draw = formatter or self.formatter
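
A small sketch of the ``DependencyGraph`` API above, building from ``(obj, dependencies)`` tuples (the exact ordering among independent nodes is unspecified)::

    from celery.utils.graph import DependencyGraph

    g = DependencyGraph([
        ('db', []),
        ('cache', ['db']),
        ('web', ['db', 'cache']),
    ])
    print(g.topsort())  # e.g. ['db', 'cache', 'web'] -- dependencies first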

+ 1 - 8
celery/utils/imports.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.utils.import
-    ~~~~~~~~~~~~~~~~~~~
-
-    Utilities related to importing modules and symbols by name.
-
-"""
+"""Utilities related to importing modules and symbols by name."""
 from __future__ import absolute_import, unicode_literals
 
 import imp as _imp
@@ -55,7 +49,6 @@ def instantiate(name, *args, **kwargs):
     """Instantiate class by name.
 
     See :func:`symbol_by_name`.
-
     """
     return symbol_by_name(name)(*args, **kwargs)
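
``instantiate()`` above is just ``symbol_by_name`` plus a call, so for example::

    from celery.utils.imports import instantiate

    d = instantiate('collections.OrderedDict', [('a', 1)])
    assert d['a'] == 1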
 

+ 3 - 2
celery/utils/iso8601.py

@@ -1,4 +1,6 @@
-"""Originally taken from :pypi:`pyiso8601`
+"""Parse ISO8601 dates.
+
+Originally taken from :pypi:`pyiso8601`
 (http://code.google.com/p/pyiso8601/)
 
 Modified to match the behavior of ``dateutil.parser``:
@@ -29,7 +31,6 @@ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
 """
 from __future__ import absolute_import, unicode_literals
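
A minimal usage sketch, assuming this module's public ``parse_iso8601()`` function::

    from celery.utils.iso8601 import parse_iso8601

    dt = parse_iso8601('2016-04-11T15:08:30+00:00')
    assert (dt.year, dt.hour) == (2016, 15) and dt.tzinfo is not None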
 

+ 21 - 31
celery/utils/log.py

@@ -1,11 +1,5 @@
 # -*- coding: utf-8 -*-
-"""
-    celery.utils.log
-    ~~~~~~~~~~~~~~~~
-
-    Logging utilities.
-
-"""
+"""Logging utilities."""
 from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
@@ -24,17 +18,19 @@ from celery.five import string_t, text_t
 
 from .term import colored
 
-__all__ = ['ColorFormatter', 'LoggingProxy', 'base_logger',
-           'set_in_sighandler', 'in_sighandler', 'get_logger',
-           'get_task_logger', 'mlevel',
-           'get_multiprocessing_logger', 'reset_multiprocessing_logger']
+__all__ = [
+    'ColorFormatter', 'LoggingProxy', 'base_logger',
+    'set_in_sighandler', 'in_sighandler', 'get_logger',
+    'get_task_logger', 'mlevel',
+    'get_multiprocessing_logger', 'reset_multiprocessing_logger',
+]
 
 _process_aware = False
+_in_sighandler = False
 PY3 = sys.version_info[0] == 3
 
 MP_LOG = os.environ.get('MP_LOG', False)
 
-
 # Sets up our logging hierarchy.
 #
 # Every logger in the celery package inherits from the "celery"
@@ -42,8 +38,6 @@ MP_LOG = os.environ.get('MP_LOG', False)
 # logger.
 base_logger = logger = _get_logger('celery')
 
-_in_sighandler = False
-
 
 def set_in_sighandler(value):
     global _in_sighandler
@@ -121,8 +115,12 @@ def mlevel(level):
 class ColorFormatter(logging.Formatter):
     #: Loglevel -> Color mapping.
     COLORS = colored().names
-    colors = {'DEBUG': COLORS['blue'], 'WARNING': COLORS['yellow'],
-              'ERROR': COLORS['red'], 'CRITICAL': COLORS['magenta']}
+    colors = {
+        'DEBUG': COLORS['blue'],
+        'WARNING': COLORS['yellow'],
+        'ERROR': COLORS['red'],
+        'CRITICAL': COLORS['magenta'],
+    }
 
     def __init__(self, fmt=None, use_color=True):
         logging.Formatter.__init__(self, fmt)
@@ -172,9 +170,9 @@ class ColorFormatter(logging.Formatter):
 class LoggingProxy(object):
     """Forward file object to :class:`logging.Logger` instance.
 
-    :param logger: The :class:`logging.Logger` instance to forward to.
-    :param loglevel: Loglevel to use when writing messages.
-
+    Arguments:
+        logger (~logging.Logger): Logger instance to forward to.
+        loglevel (int, str): Log level to use when logging messages.
     """
     mode = 'w'
     name = None
@@ -189,7 +187,7 @@ class LoggingProxy(object):
 
     def _safewrap_handlers(self):
         """Make the logger handlers dump internal errors to
-        `sys.__stderr__` instead of `sys.stderr` to circumvent
+        :data:`sys.__stderr__` instead of :data:`sys.stderr` to circumvent
         infinite loops."""
 
         def wrap_handler(handler):                  # pragma: no cover
@@ -197,17 +195,10 @@ class LoggingProxy(object):
             class WithSafeHandleError(logging.Handler):
 
                 def handleError(self, record):
-                    exc_info = sys.exc_info()
                     try:
-                        try:
-                            traceback.print_exception(exc_info[0],
-                                                      exc_info[1],
-                                                      exc_info[2],
-                                                      None, sys.__stderr__)
-                        except IOError:
-                            pass    # see python issue 5971
-                    finally:
-                        del(exc_info)
+                        traceback.print_exc(None, sys.__stderr__)
+                    except IOError:
+                        pass    # see python issue 5971
 
             handler.handleError = WithSafeHandleError().handleError
         return [wrap_handler(h) for h in self.logger.handlers]
@@ -234,7 +225,6 @@ class LoggingProxy(object):
 
         The sequence can be any iterable object producing strings.
         This is equivalent to calling :meth:`write` for each string.
-
         """
         for part in sequence:
             self.write(part)
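
A short sketch of ``LoggingProxy`` as documented above, turning writes on a file-like object into log records (logger name is illustrative)::

    import logging
    from celery.utils.log import LoggingProxy, get_logger

    logger = get_logger('celery.redirected')
    proxy = LoggingProxy(logger, loglevel=logging.INFO)
    proxy.write('captured as a log record\n')
    proxy.writelines(['one\n', 'two\n'])  # calls write() once per string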

Some files were not shown because too many files changed in this diff