
Adds pep257 as a tox target

Ask Solem, 8 years ago
commit 4cd5bc3f50
100 changed files with 945 additions and 598 deletions
  1. Makefile (+7 -1)
  2. celery/__init__.py (+9 -6)
  3. celery/__main__.py (+1 -0)
  4. celery/_state.py (+3 -0)
  5. celery/app/__init__.py (+4 -2)
  6. celery/app/amqp.py (+11 -7)
  7. celery/app/annotations.py (+3 -2)
  8. celery/app/base.py (+43 -33)
  9. celery/app/builtins.py (+5 -7)
  10. celery/app/control.py (+18 -4)
  11. celery/app/defaults.py (+4 -0)
  12. celery/app/log.py (+7 -5)
  13. celery/app/registry.py (+2 -0)
  14. celery/app/routes.py (+2 -2)
  15. celery/app/task.py (+35 -17)
  16. celery/app/trace.py (+10 -4)
  17. celery/app/utils.py (+6 -6)
  18. celery/apps/beat.py (+2 -4)
  19. celery/apps/multi.py (+3 -0)
  20. celery/apps/worker.py (+2 -1)
  21. celery/backends/__init__.py (+2 -1)
  22. celery/backends/amqp.py (+4 -0)
  23. celery/backends/async.py (+9 -1)
  24. celery/backends/base.py (+7 -5)
  25. celery/backends/cache.py (+1 -0)
  26. celery/backends/cassandra.py (+2 -2)
  27. celery/backends/consul.py (+2 -1)
  28. celery/backends/database/__init__.py (+1 -0)
  29. celery/backends/database/models.py (+1 -1)
  30. celery/backends/database/session.py (+1 -0)
  31. celery/backends/mongodb.py (+4 -2)
  32. celery/backends/riak.py (+2 -0)
  33. celery/backends/rpc.py (+3 -0)
  34. celery/beat.py (+5 -3)
  35. celery/bin/amqp.py (+4 -1)
  36. celery/bin/base.py (+11 -4)
  37. celery/bin/beat.py (+1 -0)
  38. celery/bin/celery.py (+5 -2)
  39. celery/bin/celeryd_detach.py (+3 -0)
  40. celery/bin/events.py (+1 -0)
  41. celery/bin/graph.py (+2 -0)
  42. celery/bin/logtool.py (+2 -0)
  43. celery/bin/multi.py (+2 -0)
  44. celery/bin/worker.py (+2 -0)
  45. celery/bootsteps.py (+9 -2)
  46. celery/canvas.py (+37 -13)
  47. celery/concurrency/__init__.py (+1 -0)
  48. celery/concurrency/asynpool.py (+29 -31)
  49. celery/concurrency/base.py (+4 -1)
  50. celery/concurrency/eventlet.py (+3 -0)
  51. celery/concurrency/gevent.py (+2 -0)
  52. celery/concurrency/prefork.py (+2 -1)
  53. celery/concurrency/solo.py (+1 -0)
  54. celery/contrib/abortable.py (+7 -2)
  55. celery/contrib/migrate.py (+15 -6)
  56. celery/contrib/rdb.py (+3 -2)
  57. celery/contrib/sphinx.py (+4 -0)
  58. celery/events/__init__.py (+7 -6)
  59. celery/events/cursesmon.py (+3 -0)
  60. celery/events/dumper.py (+2 -0)
  61. celery/events/snapshot.py (+2 -0)
  62. celery/events/state.py (+9 -2)
  63. celery/exceptions.py (+8 -12)
  64. celery/five.py (+2 -222)
  65. celery/fixups/__init__.py (+1 -0)
  66. celery/fixups/django.py (+2 -0)
  67. celery/loaders/__init__.py (+1 -1)
  68. celery/loaders/app.py (+1 -1)
  69. celery/loaders/base.py (+9 -14)
  70. celery/loaders/default.py (+1 -2)
  71. celery/local.py (+218 -7)
  72. celery/platforms.py (+22 -11)
  73. celery/result.py (+33 -23)
  74. celery/schedules.py (+51 -23)
  75. celery/security/certificate.py (+7 -7)
  76. celery/security/key.py (+1 -0)
  77. celery/security/serialization.py (+4 -3)
  78. celery/security/utils.py (+1 -0)
  79. celery/states.py (+3 -1)
  80. celery/task/base.py (+9 -9)
  81. celery/utils/abstract.py (+4 -0)
  82. celery/utils/collections.py (+13 -7)
  83. celery/utils/debug.py (+14 -6)
  84. celery/utils/deprecated.py (+2 -0)
  85. celery/utils/dispatch/__init__.py (+1 -0)
  86. celery/utils/dispatch/saferef.py (+17 -13)
  87. celery/utils/dispatch/signal.py (+2 -1)
  88. celery/utils/functional.py (+17 -10)
  89. celery/utils/graph.py (+11 -6)
  90. celery/utils/imports.py (+6 -3)
  91. celery/utils/iso8601.py (+1 -2)
  92. celery/utils/log.py (+20 -11)
  93. celery/utils/nodenames.py (+8 -2)
  94. celery/utils/objects.py (+3 -1)
  95. celery/utils/pytest.py (+14 -0)
  96. celery/utils/saferepr.py (+8 -1)
  97. celery/utils/serialization.py (+11 -6)
  98. celery/utils/static/__init__.py (+5 -0)
  99. celery/utils/sysinfo.py (+8 -2)
  100. celery/utils/text.py (+12 -1)

+ 7 - 1
Makefile

@@ -6,6 +6,8 @@ GIT=git
 TOX=tox
 ICONV=iconv
 FLAKE8=flake8
+PEP257=pep257
+PYROMA=pyroma
 FLAKEPLUS=flakeplus
 SPHINX2RST=sphinx2rst
 RST2HTML=rst2html.py
@@ -40,6 +42,7 @@ help:
 	@echo "    flakes --------  - Check code for syntax and style errors."
 	@echo "      flakecheck     - Run flake8 on the source code."
 	@echo "      flakepluscheck - Run flakeplus on the source code."
+	@echo "      pep257check    - Run pep257 on the source code."
 	@echo "readme               - Regenerate README.rst file."
 	@echo "contrib              - Regenerate CONTRIBUTING.rst file"
 	@echo "clean-dist --------- - Clean all distribution build artifacts."
@@ -90,6 +93,9 @@ flakecheck:
 	# the only way to enable all-1 errors is to ignore one of them.
 	$(FLAKE8) --ignore=X999 "$(PROJ)" "$(TESTDIR)"
 
+pep257check:
+	$(PEP257) --ignore=D102,D104,D203,D105 "$(PROJ)"
+
 flakediag:
 	-$(MAKE) flakecheck
 
@@ -99,7 +105,7 @@ flakepluscheck:
 flakeplusdiag:
 	-$(MAKE) flakepluscheck
 
-flakes: flakediag flakeplusdiag
+flakes: flakediag flakeplusdiag pep257check
 
 clean-readme:
 	-rm -f $(README)
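
The ignored error codes are pep257's D102 (missing docstring in a public method), D104 (missing docstring in a public package), D203 (one blank line required before a class docstring) and D105 (missing docstring in a magic method). A minimal sketch of the docstring shape the rest of this commit standardizes on, which passes the check above (the class and method are illustrative, not taken from the commit):

    class QueueRegistry(object):
        """Map of registered queues."""  # one line, imperative, period.

        def select(self, include):
            """Select a subset of queues to consume from.

            Arguments:
                include (Sequence[str]): names of queues to keep.
            """
            # Multi-line form: summary line, blank line, body,
            # closing quotes on their own line.
            return [self[name] for name in include]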

+ 9 - 6
celery/__init__.py

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-"""Distributed Task Queue"""
+"""Distributed Task Queue."""
 # :copyright: (c) 2015-2016 Ask Solem.  All rights reserved.
 # :copyright: (c) 2012-2014 GoPivotal, Inc., All rights reserved.
 # :copyright: (c) 2009 - 2012 Ask Solem and individual contributors,
@@ -122,10 +122,13 @@ def maybe_patch_concurrency(argv=sys.argv,
                             short_opts=['-P'], long_opts=['--pool'],
                             patches={'eventlet': _patch_eventlet,
                                      'gevent': _patch_gevent}):
-    """With short and long opt alternatives that specify the command line
+    """Apply eventlet/gevent monkeypatches.
+
+    With short and long opt alternatives that specify the command line
     option to set the pool, this makes sure that anything that needs
     to be patched is completed as early as possible.
-    (e.g., eventlet/gevent monkey patches)."""
+    (e.g., eventlet/gevent monkey patches).
+    """
     try:
         pool = _find_option_with_arg(argv, short_opts, long_opts)
     except KeyError:
@@ -143,9 +146,9 @@ def maybe_patch_concurrency(argv=sys.argv,
         concurrency.get_implementation(pool)
 
 # Lazy loading
-from celery import five  # noqa
+from celery import local  # noqa
 
-old_module, new_module = five.recreate_module(  # pragma: no cover
+old_module, new_module = local.recreate_module(  # pragma: no cover
     __name__,
     by_module={
         'celery.app': ['Celery', 'bugreport', 'shared_task'],
@@ -162,7 +165,7 @@ old_module, new_module = five.recreate_module(  # pragma: no cover
     __package__='celery', __file__=__file__,
     __path__=__path__, __doc__=__doc__, __version__=__version__,
     __author__=__author__, __contact__=__contact__,
-    __homepage__=__homepage__, __docformat__=__docformat__, five=five,
+    __homepage__=__homepage__, __docformat__=__docformat__, local=local,
     VERSION=VERSION, SERIES=SERIES, VERSION_BANNER=VERSION_BANNER,
     version_info_t=version_info_t,
     version_info=version_info,
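
A short usage sketch of maybe_patch_concurrency, whose docstring is rewritten above (the argv value is hypothetical; celery.__main__ passes the real sys.argv):

    from celery import maybe_patch_concurrency

    # Must run before anything else imports socket/threading so the
    # eventlet/gevent monkey patches take effect process-wide.
    maybe_patch_concurrency(['celery', 'worker', '-P', 'eventlet'])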

+ 1 - 0
celery/__main__.py

@@ -9,6 +9,7 @@ __all__ = ['main']
 
 
 def main():
+    """Entrypoint to the ``celery`` umbrella command."""
     if 'multi' not in sys.argv:
         maybe_patch_concurrency()
     from celery.bin.celery import main

+ 3 - 0
celery/_state.py

@@ -36,6 +36,7 @@ _task_join_will_block = False
 
 
 def connect_on_app_finalize(callback):
+    """Connect callback to be called when any app is finalized."""
     _on_app_finalizers.add(callback)
     return callback
 
@@ -66,6 +67,7 @@ _task_stack = LocalStack()
 
 
 def set_default_app(app):
+    """Set default app."""
     global default_app
     default_app = app
 
@@ -87,6 +89,7 @@ def _set_current_app(app):
 
 if os.environ.get('C_STRICT_APP'):  # pragma: no cover
     def get_current_app():
+        """Return the current app."""
         raise Exception('USES CURRENT APP')
         import traceback
         print('-- USES CURRENT_APP', file=sys.stderr)  # noqa+
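
A hedged sketch of what connect_on_app_finalize is for; the task body here is illustrative, but celery.app.builtins (below) uses this same pattern to register its built-in tasks:

    from celery._state import connect_on_app_finalize

    @connect_on_app_finalize
    def add_ping_task(app):
        @app.task(name='demo.ping', shared=False, lazy=False)
        def ping():
            return 'pong'
        return ping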

+ 4 - 2
celery/app/__init__.py

@@ -42,6 +42,7 @@ pop_current_task = _task_stack.pop
 
 
 def bugreport(app=None):
+    """Return information useful in bug reports."""
     return (app or current_app()).bugreport()
 
 
@@ -71,11 +72,13 @@ def _app_or_default_trace(app=None):  # pragma: no cover
 
 
 def enable_trace():
+    """Enable tracing of app instances."""
     global app_or_default
     app_or_default = _app_or_default_trace
 
 
 def disable_trace():
+    """Disable tracing of app instances."""
     global app_or_default
     app_or_default = _app_or_default
 
@@ -86,7 +89,7 @@ else:
 
 
 def shared_task(*args, **kwargs):
-    """Create shared tasks (decorator).
+    """Create shared task (decorator).
 
     This can be used by library authors to create tasks that'll work
     for any app environment.
@@ -109,7 +112,6 @@ def shared_task(*args, **kwargs):
         >>> add.app is app2
         True
     """
-
     def create_shared_task(**options):
 
         def __inner(fun):

+ 11 - 7
celery/app/amqp.py

@@ -61,6 +61,7 @@ class Queues(dict):
         ha_policy (Sequence, str): Default HA policy for queues with none set.
         max_priority (int): Default x-max-priority for queues with none set.
     """
+
     #: If set, this is a subset of queues to consume from.
     #: The rest of the queues are then used for routing only.
     _consume_from = None
@@ -163,17 +164,18 @@ class Queues(dict):
         return info[0] + '\n' + textindent('\n'.join(info[1:]), indent)
 
     def select_add(self, queue, **kwargs):
-        """Add new task queue that'll be consumed from even when
-        a subset has been selected using the
-        :option:`celery worker -Q` option."""
+        """Add new task queue that'll be consumed from.
+
+        The queue will be active even when a subset has been selected
+        using the :option:`celery worker -Q` option.
+        """
         q = self.add(queue, **kwargs)
         if self._consume_from is not None:
             self._consume_from[q.name] = q
         return q
 
     def select(self, include):
-        """Sets :attr:`consume_from` by selecting a subset of the
-        currently defined queues.
+        """Select a subset of currently defined queues to consume from.
 
         Arguments:
             include (Sequence[str], str): Names of queues to consume from.
@@ -210,6 +212,8 @@ class Queues(dict):
 
 
 class AMQP(object):
+    """App AMQP API: app.amqp."""
+
     Connection = Connection
     Consumer = Consumer
     Producer = Producer
@@ -256,8 +260,8 @@ class AMQP(object):
 
     def Queues(self, queues, create_missing=None, ha_policy=None,
                autoexchange=None, max_priority=None):
-        """Create new :class:`Queues` instance, using queue defaults
-        from the current configuration."""
+        # Create new :class:`Queues` instance, using queue defaults
+        # from the current configuration.
         conf = self.app.conf
         if create_missing is None:
             create_missing = conf.task_create_missing_queues
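
Illustrative calls against the reworded Queues API (app is an assumed Celery instance and the queue names are made up):

    app.amqp.queues.select(['default', 'images'])  # consume only these
    app.amqp.queues.select_add('images.resize')    # consumed even after
                                                   # -Q narrowed the set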

+ 3 - 2
celery/app/annotations.py

@@ -20,6 +20,7 @@ __all__ = ['MapAnnotation', 'prepare', 'resolve_all']
 
 
 class MapAnnotation(dict):
+    """Annotation map: task_name => attributes."""
 
     def annotate_any(self):
         try:
@@ -35,8 +36,7 @@ class MapAnnotation(dict):
 
 
 def prepare(annotations):
-    """Expands the :setting:`task_annotations` setting."""
-
+    """Expand the :setting:`task_annotations` setting."""
     def expand_annotation(annotation):
         if isinstance(annotation, dict):
             return MapAnnotation(annotation)
@@ -52,4 +52,5 @@ def prepare(annotations):
 
 
 def resolve_all(anno, task):
+    """Resolve all pending annotations."""
     return (x for x in (_first_match(anno, task), _first_match_any(anno)) if x)
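
For reference, a sketch of the mapping that prepare() expands (the task name and attributes here are illustrative):

    app.conf.task_annotations = {
        'tasks.add': {'rate_limit': '10/s'},  # per-task attributes
        '*': {'acks_late': True},             # applies to any task
    }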

+ 43 - 33
celery/app/base.py

@@ -76,7 +76,7 @@ Example:
 
 
 def app_has_custom(app, attr):
-    """Returns true if app has customized method `attr`.
+    """Return true if app has customized method `attr`.
 
     Note:
         This is used for optimizations in cases where we know
@@ -88,8 +88,9 @@ def app_has_custom(app, attr):
 
 
 def _unpickle_appattr(reverse_name, args):
-    """Given an attribute name and a list of args, gets
-    the attribute from the current app and calls it."""
+    """Unpickle app."""
+    # Given an attribute name and a list of args, gets
+    # the attribute from the current app and calls it.
     return get_current_app()._rgetattr(reverse_name)(*args)
 
 
@@ -153,6 +154,7 @@ class Celery(object):
             or object.  Attributes may include any settings described in
             the documentation.
     """
+
     #: This is deprecated, use :meth:`reduce_keys` instead
     Pickler = AppPickler
 
@@ -281,11 +283,11 @@ class Celery(object):
             self._preconf_set_by_auto.add(key)
 
     def set_current(self):
-        """Makes this the current app for this thread."""
+        """Make this the current app for this thread."""
         _set_current_app(self)
 
     def set_default(self):
-        """Makes this the default app for all threads."""
+        """Make this the default app for all threads."""
         set_default_app(self)
 
     def _ensure_after_fork(self):
@@ -441,8 +443,11 @@ class Celery(object):
         return gen_task_name(self, name, module)
 
     def finalize(self, auto=False):
-        """Finalizes the app by loading built-in tasks,
-        and evaluating pending task decorators."""
+        """Finalize the app.
+
+        This loads built-in tasks, evaluates pending task decorators,
+        reads configuration, etc.
+        """
         with self._finalize_mutex:
             if not self.finalized:
                 if auto and not self.autofinalize:
@@ -485,8 +490,9 @@ class Celery(object):
 
     def config_from_object(self, obj,
                            silent=False, force=False, namespace=None):
-        """Reads configuration from object, where object is either
-        an object or the name of a module to import.
+        """Read configuration from object.
+
+        Object is either an actual object or the name of a module to import.
 
         Example:
             >>> celery.config_from_object('myapp.celeryconfig')
@@ -560,8 +566,10 @@ class Celery(object):
 
     def autodiscover_tasks(self, packages=None,
                            related_name='tasks', force=False):
-        """Try to auto-discover and import modules with a specific name (by
-        default 'tasks').
+        """Auto-discover task modules.
+
+        Searches a list of packages for a "tasks.py" module (or use
+        related_name argument).
 
         If the name is empty, this will be delegated to fix-ups (e.g., Django).
 
@@ -775,7 +783,9 @@ class Celery(object):
         return self.connection_for_write()
 
     def connection_or_acquire(self, connection=None, pool=True, *_, **__):
-        """For use within a :keyword:`with` statement to get a connection
+        """Context used to acquire a connection from the pool.
+
+        For use within a :keyword:`with` statement to get a connection
         from the pool if one is not already provided.
 
         Arguments:
@@ -786,7 +796,9 @@ class Celery(object):
     default_connection = connection_or_acquire  # XXX compat
 
     def producer_or_acquire(self, producer=None):
-        """For use within a :keyword:`with` statement to get a producer
+        """Context used to acquire a producer from the pool.
+
+        For use within a :keyword:`with` statement to get a producer
         from the pool if one is not already provided
 
         Arguments:
@@ -803,25 +815,29 @@ class Celery(object):
         return find_deprecated_settings(c)
 
     def now(self):
-        """Return the current time and date as a
-        :class:`~datetime.datetime` object."""
+        """Return the current time and date as a datetime."""
         return self.loader.now(utc=self.conf.enable_utc)
 
     def select_queues(self, queues=None):
-        """Select a subset of queues, where queues must be a list of queue
-        names to keep."""
+        """Select subset of queues.
+
+        Arguments:
+            queues (Sequence[str]): a list of queue names to keep.
+        """
         return self.amqp.queues.select(queues)
 
     def either(self, default_key, *values):
-        """Fallback to the value of a configuration key if none of the
-        `*values` are true."""
+        """Get key from configuration or use default values.
+
+        Fallback to the value of a configuration key if none of the
+        `*values` are true.
+        """
         return first(None, [
             first(None, values), starpromise(self.conf.get, default_key),
         ])
 
     def bugreport(self):
-        """Return a string with information useful for the Celery core
-        developers when reporting a bug."""
+        """Return information useful in bug reports."""
         return bugreport(self)
 
     def _get_backend(self):
@@ -832,13 +848,11 @@ class Celery(object):
         return backend(app=self, url=url)
 
     def _get_from_conf_and_finalize(self, key):
-        """Get value for configuration key and finalize
-        loading the configuration.
+        """Get config value by key and finalize loading the configuration.
 
         Note:
             This is used by PendingConfiguration:
-                as soon as you access a key the configuration
-                is read.
+                as soon as you access a key the configuration is read.
         """
         conf = self._conf = self._load_config()
         return conf[key]
@@ -917,8 +931,7 @@ class Celery(object):
         self._conf.beat_schedule[key] = entry
 
     def create_task_cls(self):
-        """Creates a base task class using default configuration
-        taken from this app."""
+        """Create a base task class bound to this app."""
         return self.subclass_with_self(
             self.task_cls, name='Task', attribute='_app',
             keep_reduce=True, abstract=True,
@@ -926,8 +939,7 @@ class Celery(object):
 
     def subclass_with_self(self, Class, name=None, attribute='app',
                            reverse=None, keep_reduce=False, **kw):
-        """Subclass an app-compatible class by setting its app attribute
-        to be this app instance.
+        """Subclass an app-compatible class.
 
         App-compatible means that the class has a class attribute that
         provides the default app it should use, for example:
@@ -987,8 +999,7 @@ class Celery(object):
         )
 
     def __reduce_keys__(self):
-        """Return keyword arguments used to reconstruct the object
-        when unpickling."""
+        """Keyword arguments used to reconstruct the object when unpickling."""
         return {
             'main': self.main,
             'changes':
@@ -1086,8 +1097,7 @@ class Celery(object):
 
     @property
     def current_task(self):
-        """The instance of the task that's being executed, or
-        :const:`None`."""
+        """Instance of task being executed, or :const:`None`."""
         return _task_stack.top
 
     @property

+ 5 - 7
celery/app/builtins.py

@@ -15,8 +15,7 @@ logger = get_logger(__name__)
 
 @connect_on_app_finalize
 def add_backend_cleanup_task(app):
-    """The backend cleanup task can be used to clean up the default result
-    backend.
+    """Task used to clean up expired results.
 
     If the configured backend requires periodic cleanup this task is also
     automatically configured to run every day at 4am (requires
@@ -30,8 +29,7 @@ def add_backend_cleanup_task(app):
 
 @connect_on_app_finalize
 def add_accumulate_task(app):
-    """This task is used by Task.replace when replacing a task with
-    a group, to "collect" results."""
+    """Task used by Task.replace when replacing task with group."""
     @app.task(bind=True, name='celery.accumulate', shared=False, lazy=False)
     def accumulate(self, *args, **kwargs):
         index = kwargs.get('index')
@@ -40,9 +38,10 @@ def add_accumulate_task(app):
 
 @connect_on_app_finalize
 def add_unlock_chord_task(app):
-    """This task is used by result backends without native chord support.
+    """Task used by result backends without native chord support.
 
-    It joins chords by creating a task chain polling the header for completion.
+    Will join the chord by creating a task chain polling the header
+    for completion.
     """
     from celery.canvas import maybe_signature
     from celery.exceptions import ChordError
@@ -158,7 +157,6 @@ def add_group_task(app):
 @connect_on_app_finalize
 def add_chain_task(app):
     """No longer used, but here for backwards compatibility."""
-
    @app.task(name='celery.chain', shared=False, lazy=False)
    def chain(*args, **kwargs):
        raise NotImplementedError('chain is not a real task')

+ 18 - 4
celery/app/control.py

@@ -27,6 +27,18 @@ the celery worker `-n` option.\
 
 
 def flatten_reply(reply):
+    """Flatten node replies.
+
+    Convert from a list of replies in this format::
+
+        [{'a@example.com': reply},
+         {'b@example.com': reply}]
+
+    into this format::
+
+        {'a@example.com': reply,
+         'b@example.com': reply}
+    """
     nodes, dupes = {}, set()
     for item in reply:
         [dupes.add(name) for name in item if name in nodes]
@@ -41,6 +53,8 @@ def flatten_reply(reply):
 
 
 class Inspect(object):
+    """API for app.control.inspect."""
+
     app = None
 
     def __init__(self, destination=None, timeout=1, callback=None,
@@ -125,6 +139,8 @@ class Inspect(object):
 
 
 class Control(object):
+    """Worker remote control client."""
+
     Mailbox = Mailbox
 
     def __init__(self, app=None):
@@ -198,8 +214,7 @@ class Control(object):
                               timeout=timeout, **kwargs)
 
     def rate_limit(self, task_name, rate_limit, destination=None, **kwargs):
-        """Tell all (or specific) workers to set a new rate limit
-        for task by type.
+        """Tell workers to set a new rate limit for task by type.
 
         Arguments:
             task_name (str): Name of task to change rate limit for.
@@ -259,8 +274,7 @@ class Control(object):
         )
 
     def time_limit(self, task_name, soft=None, hard=None, **kwargs):
-        """Tell all (or specific) workers to set time limits for
-        a task by type.
+        """Tell workers to set time limits for a task by type.
 
         Arguments:
             task_name (str): Name of task to change time limits for.
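
The reshaping described in the new flatten_reply docstring, spelled out on hypothetical ping replies:

    replies = [{'a@example.com': {'ok': 'pong'}},
               {'b@example.com': {'ok': 'pong'}}]
    flat = {name: r for item in replies for name, r in item.items()}
    assert flat == {'a@example.com': {'ok': 'pong'},
                    'b@example.com': {'ok': 'pong'}}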

+ 4 - 0
celery/app/defaults.py

@@ -53,6 +53,8 @@ def old_ns(ns):
 
 
 @python_2_unicode_compatible
 class Option(object):
+    """Describes a Celery configuration option."""
+
     alt = None
     deprecate_by = None
     remove_by = None
@@ -295,6 +297,7 @@ def _to_compat(ns, key, opt):
 
 
 def flatten(d, root='', keyfilter=_flatten_keys):
+    """Flatten settings."""
     stack = deque([(root, d)])
     while stack:
         ns, options = stack.popleft()
@@ -330,6 +333,7 @@ def find_deprecated_settings(source):  # pragma: no cover
 
 @memoize(maxsize=None)
 def find(name, namespace='celery'):
+    """Find setting by name."""
     # - Try specified name-space first.
     namespace = namespace.lower()
     try:

+ 7 - 5
celery/app/log.py

@@ -19,7 +19,8 @@ from kombu.utils.encoding import set_default_encoding_file
 
 from celery import signals
 from celery._state import get_current_task
-from celery.five import class_property, string_t
+from celery.five import string_t
+from celery.local import class_property
 from celery.platforms import isatty
 from celery.utils.log import (
     get_logger, mlevel,
@@ -35,6 +36,7 @@ MP_LOG = os.environ.get('MP_LOG', False)
 
 
 class TaskFormatter(ColorFormatter):
+    """Formatter for tasks, adding the task name and id."""
 
     def format(self, record):
         task = get_current_task()
@@ -48,6 +50,8 @@ class TaskFormatter(ColorFormatter):
 
 
 class Logging(object):
+    """Application logging setup (app.log)."""
+
     #: The logging subsystem is only configured once per process.
     #: setup_logging_subsystem sets this flag, and subsequent calls
     #: will do nothing.
@@ -181,8 +185,7 @@ class Logging(object):
 
     def redirect_stdouts_to_logger(self, logger, loglevel=None,
                                    stdout=True, stderr=True):
-        """Redirect :class:`sys.stdout` and :class:`sys.stderr` to a
-        logging instance.
+        """Redirect :class:`sys.stdout` and :class:`sys.stderr` to logger.
 
         Arguments:
             logger (logging.Logger): Logger instance to redirect to.
@@ -220,8 +223,7 @@ class Logging(object):
         return logger
 
     def _detect_handler(self, logfile=None):
-        """Create log handler with either a filename, an open stream
-        or :const:`None` (stderr)."""
+        """Create handler from filename, an open stream or `None` (stderr)."""
         logfile = sys.__stderr__ if logfile is None else logfile
         if hasattr(logfile, 'write'):
             return logging.StreamHandler(logfile)
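
A minimal sketch of the redirect helper documented above (the logger name and level are assumptions, not from the commit; app is an assumed Celery instance):

    import logging

    logger = logging.getLogger('celery.redirected')
    # After this, print() and stray writes to stderr end up in the log.
    app.log.redirect_stdouts_to_logger(logger, loglevel=logging.WARNING)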

+ 2 - 0
celery/app/registry.py

@@ -14,6 +14,8 @@ __all__ = ['TaskRegistry']
 
 
 class TaskRegistry(dict):
+    """Map of registered tasks."""
+
     NotRegistered = NotRegistered
 
     def __missing__(self, key):

+ 2 - 2
celery/app/routes.py

@@ -57,6 +57,7 @@ class MapRoute(object):
 
 
 class Router(object):
+    """Route tasks based on the :setting:`task_routes` setting."""
 
     def __init__(self, routes=None, queues=None,
                  create_missing=False, app=None):
@@ -121,8 +122,7 @@ def expand_router_string(router):
 
 
 def prepare(routes):
-    """Expands the :setting:`task_routes` setting."""
-
+    """Expand the :setting:`task_routes` setting."""
     def expand_route(route):
         if isinstance(route, (Mapping, list, tuple)):
             return MapRoute(route)

+ 35 - 17
celery/app/task.py

@@ -13,7 +13,8 @@ from celery import states
 from celery._state import _task_stack
 from celery.canvas import signature
 from celery.exceptions import Ignore, MaxRetriesExceededError, Reject, Retry
-from celery.five import class_property, items, python_2_unicode_compatible
+from celery.five import items, python_2_unicode_compatible
+from celery.local import class_property
 from celery.result import EagerResult
 from celery.utils import abstract
 from celery.utils.functional import mattrgetter, maybe_list
@@ -62,6 +63,8 @@ def _reprtask(task, fmt=None, flags=None):
 
 @python_2_unicode_compatible
 class Context(object):
+    """Task request variables (Task.request)."""
+
     logfile = None
     loglevel = None
     hostname = None
@@ -140,10 +143,12 @@ class Context(object):
 class Task(object):
     """Task base class.
 
-    When called tasks apply the :meth:`run` method.  This method must
-    be defined by all tasks (that is unless the :meth:`__call__` method
-    is overridden).
+    Note:
+        When called tasks apply the :meth:`run` method.  This method must
+        be defined by all tasks (that is unless the :meth:`__call__` method
+        is overridden).
     """
+
     __trace__ = None
     __v2_compat__ = False  # set by old base in celery.task.base
 
@@ -319,8 +324,12 @@ class Task(object):
 
     @classmethod
     def on_bound(self, app):
-        """This method can be defined to do additional actions when the
-        task class is bound to an app."""
+        """Called when the task is bound to an app.
+
+        Note:
+            This class method can be defined to do additional actions when
+            the task class is bound to an app.
+        """
         pass
 
     @classmethod
@@ -720,33 +729,43 @@ class Task(object):
                                            task_name=self.name, **kwargs)
 
     def signature(self, args=None, *starargs, **starkwargs):
-        """Return :class:`~celery.signature` object for
-        this task, wrapping arguments and execution options
-        for a single task invocation."""
+        """Create signature.
+
+        Returns:
+            :class:`~celery.signature`:  object for
+                this task, wrapping arguments and execution options
+                for a single task invocation.
+        """
         starkwargs.setdefault('app', self.app)
         return signature(self, args, *starargs, **starkwargs)
     subtask = signature
 
     def s(self, *args, **kwargs):
-        """``.s(*a, **k) -> .signature(a, k)``"""
+        """Create signature.
+
+        Shortcut for ``.s(*a, **k) -> .signature(a, k)``.
+        """
         return self.signature(args, kwargs)
 
     def si(self, *args, **kwargs):
-        """``.si(*a, **k) -> .signature(a, k, immutable=True)``"""
+        """Create immutable signature.
+
+        Shortcut for ``.si(*a, **k) -> .signature(a, k, immutable=True)``.
+        """
         return self.signature(args, kwargs, immutable=True)
 
     def chunks(self, it, n):
-        """Creates a :class:`~celery.canvas.chunks` task for this task."""
+        """Create a :class:`~celery.canvas.chunks` task for this task."""
         from celery import chunks
         return chunks(self.s(), it, n, app=self.app)
 
     def map(self, it):
-        """Creates a :class:`~celery.canvas.xmap` task from ``it``."""
+        """Create a :class:`~celery.canvas.xmap` task from ``it``."""
         from celery import xmap
         return xmap(self.s(), it, app=self.app)
 
     def starmap(self, it):
-        """Creates a :class:`~celery.canvas.xstarmap` task from ``it``."""
+        """Create a :class:`~celery.canvas.xstarmap` task from ``it``."""
        from celery import xstarmap
        return xstarmap(self.s(), it, app=self.app)
 
@@ -756,8 +775,7 @@ class Task(object):
             return d.send(type_, uuid=req.id, **fields)
 
     def replace(self, sig):
-        """Replace the current task, with a new task inheriting the
-        same task id.
+        """Replace this task, with a new task inheriting the task id.
 
         .. versionadded:: 4.0
 
@@ -907,7 +925,7 @@ class Task(object):
         self.request_stack.pop()
 
     def __repr__(self):
-        """`repr(task)`"""
+        """``repr(task)``."""
         return _reprtask(self, R_SELF_TASK if self.__self__ else R_INSTANCE)
 
     def _get_request(self):
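
The signature shortcuts above, as a usage sketch (add is an assumed @app.task computing x + y):

    sig = add.s(2, 2)    # same as add.signature((2, 2), {})
    isig = add.si(2, 2)  # immutable: ignores results from the parent
    opts = add.signature((2, 2), countdown=10)  # with execution options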

+ 10 - 4
celery/app/trace.py

@@ -114,8 +114,7 @@ trace_ok_t = namedtuple('trace_ok_t', ('retval', 'info', 'runtime', 'retstr'))
 
 
 def task_has_custom(task, attr):
-    """Return true if the task or one of its bases
-    defines ``attr`` (excluding the one in BaseTask)."""
+    """Return true if the task overrides ``attr``."""
     return mro_lookup(task.__class__, attr, stop={BaseTask, object},
                       monkey_patched=['celery.app.task'])
 
@@ -134,6 +133,8 @@ def get_log_policy(task, einfo, exc):
 
 
 class TraceInfo(object):
+    """Information about task execution."""
+
     __slots__ = ('state', 'retval')
 
     def __init__(self, state, retval=None):
@@ -241,8 +242,10 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
                  Info=TraceInfo, eager=False, propagate=False, app=None,
                  monotonic=monotonic, truncate=truncate,
                  trace_ok_t=trace_ok_t, IGNORE_STATES=IGNORE_STATES):
-    """Return a function that traces task execution; catches all
-    exceptions and updates result backend with the state and result
+    """Return a function that traces task execution.
+
+    Catches all exceptions and updates result backend with the
+    state and result.
 
     If the call was successful, it saves the result to the task result
     backend, and sets the task status to `"SUCCESS"`.
@@ -469,6 +472,7 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
 
 
 def trace_task(task, uuid, args, kwargs, request={}, **opts):
+    """Trace task execution."""
     try:
         if task.__trace__ is None:
             task.__trace__ = build_tracer(task.name, task, **opts)
@@ -535,6 +539,7 @@ def report_internal_error(task, exc):
 
 
 def setup_worker_optimizations(app, hostname=None):
+    """Setup worker related optimizations."""
     global trace_task_ret
 
     hostname = hostname or gethostname()
@@ -569,6 +574,7 @@ def setup_worker_optimizations(app, hostname=None):
 
 
 def reset_worker_optimizations():
+    """Reset previously configured optimizations."""
     global trace_task_ret
     trace_task_ret = _trace_task_ret
     try:

+ 6 - 6
celery/app/utils.py

@@ -136,7 +136,7 @@ class Settings(ConfigurationView):
         return find(name, namespace)
 
     def find_value_for_key(self, name, namespace='celery'):
-        """Shortcut to ``get_by_parts(*find_option(name)[:-1])``"""
+        """Shortcut to ``get_by_parts(*find_option(name)[:-1])``."""
         return self.get_by_parts(*self.find_option(name, namespace)[:-1])
 
     def get_by_parts(self, *parts):
@@ -158,8 +158,7 @@ class Settings(ConfigurationView):
         })
 
     def humanize(self, with_defaults=False, censored=True):
-        """Return a human readable string showing changes to the
-        configuration."""
+        """Return a human readable text showing configuration changes."""
         return '\n'.join(
             '{0}: {1}'.format(key, pretty(value, width=50))
             for key, value in items(self.table(with_defaults, censored)))
@@ -264,18 +263,18 @@ class AppPickler(object):
 
 
 def _unpickle_app(cls, pickler, *args):
-    """Rebuild app for versions 2.5+"""
+    """Rebuild app for versions 2.5+."""
     return pickler()(cls, *args)
 
 
 def _unpickle_app_v2(cls, kwargs):
-    """Rebuild app for versions 3.1+"""
+    """Rebuild app for versions 3.1+."""
     kwargs['set_as_current'] = False
     return cls(**kwargs)
 
 
 def filter_hidden_settings(conf):
-
+    """Filter sensitive settings."""
     def maybe_censor(key, value, mask='*' * 8):
         if isinstance(value, Mapping):
             return filter_hidden_settings(value)
@@ -324,6 +323,7 @@ def bugreport(app):
 
 
 def find_app(app, symbol_by_name=symbol_by_name, imp=import_from_cwd):
+    """Find app by name."""
     from .base import Celery
 
     try:

+ 2 - 4
celery/apps/beat.py

@@ -36,6 +36,7 @@ logger = get_logger('celery.beat')
 
 
 class Beat(object):
+    """Beat as a service."""
 
     Service = beat.Service
     app = None
@@ -147,11 +148,8 @@ class Beat(object):
         )
 
     def install_sync_handler(self, service):
-        """Install a `SIGTERM` + `SIGINT` handler that saves
-        the beat schedule."""
-
+        """Install a `SIGTERM` + `SIGINT` handler saving the schedule."""
         def _sync(signum, frame):
             service.sync()
             raise SystemExit()
-
         platforms.signals.update(SIGTERM=_sync, SIGINT=_sync)

+ 3 - 0
celery/apps/multi.py

@@ -1,3 +1,4 @@
+"""Start/stop/manage workers."""
 from __future__ import absolute_import, unicode_literals
 
 import errno
@@ -125,6 +126,7 @@ class NamespacedOptionParser(object):
 
 
 class Node(object):
+    """Represents a node in a cluster."""
 
     def __init__(self, name,
                  cmd=None, append=None, options=None, extra_args=None):
@@ -345,6 +347,7 @@ class MultiParser(object):
 
 
 class Cluster(UserList):
+    """Represent a cluster of workers."""
 
     def __init__(self, nodes, cmd=None, env=None,
                  on_stopping_preamble=None,

+ 2 - 1
celery/apps/worker.py

@@ -88,6 +88,7 @@ EXTRA_INFO_FMT = """
 
 
 
 
 class Worker(WorkController):
 class Worker(WorkController):
+    """Worker as a program."""
 
 
     def on_before_init(self, quiet=False, **kwargs):
     def on_before_init(self, quiet=False, **kwargs):
         self.quiet = quiet
         self.quiet = quiet
@@ -249,7 +250,7 @@ class Worker(WorkController):
         install_rdb_handler()
         install_rdb_handler()
 
 
     def macOS_proxy_detection_workaround(self):
     def macOS_proxy_detection_workaround(self):
-        """See https://github.com/celery/celery/issues#issue/161"""
+        """See https://github.com/celery/celery/issues#issue/161."""
         os.environ.setdefault('celery_dummy_proxy', 'set_by_celeryd')
         os.environ.setdefault('celery_dummy_proxy', 'set_by_celeryd')
 
 
     def set_process_status(self, info):
     def set_process_status(self, info):

+ 2 - 1
celery/backends/__init__.py

@@ -41,7 +41,7 @@ default_backend = Proxy(lambda: current_app.backend)
 
 def get_backend_cls(backend=None, loader=None,
                     extension_namespace='celery.result_backends'):
-    """Get backend class by name/alias"""
+    """Get backend class by name/alias."""
     backend = backend or 'disabled'
     loader = loader or current_app.loader
     aliases = dict(BACKEND_ALIASES, **loader.override_backends)
@@ -59,6 +59,7 @@ def get_backend_cls(backend=None, loader=None,
 
 
 def get_backend_by_url(backend=None, loader=None):
+    """Get backend class by URL."""
     url = None
     if backend and '://' in backend:
         url = backend

+ 4 - 0
celery/backends/amqp.py

@@ -40,6 +40,10 @@ class NoCacheQueue(Queue):
 
 
 class AMQPBackend(BaseBackend):
+    """The AMQP result backend.
+
+    Deprecated: Please use the RPC backend or a persistent backend.
+    """
 
     Exchange = Exchange
     Queue = NoCacheQueue

+ 9 - 1
celery/backends/async.py

@@ -16,11 +16,16 @@ from celery.exceptions import TimeoutError
 from celery.five import Empty, monotonic
 from celery.utils.threads import THREAD_TIMEOUT_MAX
 
+__all__ = [
+    'AsyncBackendMixin', 'BaseResultConsumer', 'Drainer',
+    'register_drainer',
+]
+
 drainers = {}
 
 
 def register_drainer(name):
-
+    """Decorator used to register a new result drainer type."""
     def _inner(cls):
         drainers[name] = cls
         return cls
@@ -29,6 +34,7 @@ def register_drainer(name):
 
 @register_drainer('default')
 class Drainer(object):
+    """Result draining service."""
 
     def __init__(self, result_consumer):
         self.result_consumer = result_consumer
@@ -114,6 +120,7 @@ class geventDrainer(greenletDrainer):
 
 
 class AsyncBackendMixin(object):
+    """Mixin for backends that enables the async API."""
 
     def _collect_into(self, result, bucket):
         self.result_consumer.buckets[result] = bucket
@@ -198,6 +205,7 @@ class AsyncBackendMixin(object):
 
 
 class BaseResultConsumer(object):
+    """Manager responsible for consuming result messages."""
 
     def __init__(self, backend, app, accept,
                  pending_results, pending_messages):

+ 7 - 5
celery/backends/base.py

@@ -119,7 +119,7 @@ class Backend(object):
         self.url = url
         self.url = url
 
 
     def as_uri(self, include_password=False):
     def as_uri(self, include_password=False):
-        """Return the backend as an URI, sanitizing the password or not"""
+        """Return the backend as an URI, sanitizing the password or not."""
         # when using maybe_sanitize_url(), "/" is added
         # when using maybe_sanitize_url(), "/" is added
         # we're stripping it for consistency
         # we're stripping it for consistency
         if include_password:
         if include_password:
@@ -128,7 +128,7 @@ class Backend(object):
         return url[:-1] if url.endswith(':///') else url
         return url[:-1] if url.endswith(':///') else url
 
 
     def mark_as_started(self, task_id, **meta):
     def mark_as_started(self, task_id, **meta):
-        """Mark a task as started"""
+        """Mark a task as started."""
         return self.store_result(task_id, meta, states.STARTED)
         return self.store_result(task_id, meta, states.STARTED)
 
 
     def mark_as_done(self, task_id, result,
     def mark_as_done(self, task_id, result,
@@ -487,7 +487,7 @@ class SyncBackendMixin(object):
 
 
 
 
 class BaseBackend(Backend, SyncBackendMixin):
 class BaseBackend(Backend, SyncBackendMixin):
-    pass
+    """Base (synchronous) result backend."""
 BaseDictBackend = BaseBackend  # XXX compat
 BaseDictBackend = BaseBackend  # XXX compat
 
 
 
 
@@ -551,7 +551,7 @@ class BaseKeyValueStoreBackend(Backend):
         ])

     def _strip_prefix(self, key):
-        """Takes bytes, emits string."""
+        """Take bytes: emit string."""
         key = self.key_t(key)
         for prefix in self.task_keyprefix, self.group_keyprefix:
             if key.startswith(prefix):
@@ -731,10 +731,12 @@ class BaseKeyValueStoreBackend(Backend):
 
 
 
 
 class KeyValueStoreBackend(BaseKeyValueStoreBackend, SyncBackendMixin):
-    pass
+    """Result backend base class for key/value stores."""


 class DisabledBackend(BaseBackend):
+    """Dummy result backend."""
+
     _cache = {}   # need this attribute to reset cache in tests.

     def store_result(self, *args, **kwargs):
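
A usage sketch for the as_uri() API above; the masked output in the comment assumes kombu's URL sanitization:

    from celery import Celery

    app = Celery('proj', backend='redis://:secret@localhost:6379/0')

    app.backend.as_uri()                       # masked, e.g. 'redis://:**@localhost:6379/0'
    app.backend.as_uri(include_password=True)  # 'redis://:secret@localhost:6379/0'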

+ 1 - 0
celery/backends/cache.py

@@ -88,6 +88,7 @@ backends = {
 
 
 
 
 class CacheBackend(KeyValueStoreBackend):
+    """Cache result backend."""

     servers = None
     supports_autoexpire = True

+ 2 - 2
celery/backends/cassandra.py

@@ -66,7 +66,7 @@ else:
 
 
 
 
 class CassandraBackend(BaseBackend):
-    """Cassandra backend utilizing DataStax driver
+    """Cassandra backend utilizing DataStax driver.

     Raises:
         celery.exceptions.ImproperlyConfigured:
@@ -132,7 +132,7 @@ class CassandraBackend(BaseBackend):
         self._session = None

     def _get_connection(self, write=False):
-        """Prepare the connection for action
+        """Prepare the connection for action.

         Arguments:
             write (bool): are we a writer?

+ 2 - 1
celery/backends/consul.py

@@ -28,6 +28,7 @@ the Consul result store backend."""
 
 
 class ConsulBackend(KeyValueStoreBackend):
     """Consul.io K/V store backend for Celery."""
+
     consul = consul

     supports_autoexpire = True
@@ -70,7 +71,7 @@ class ConsulBackend(KeyValueStoreBackend):
             yield self.get(key)

     def set(self, key, value):
-        """Set a key in Consul
+        """Set a key in Consul.

         Before creating the key it will create a session inside Consul
         where it creates a session with a TTL

+ 1 - 0
celery/backends/database/__init__.py

@@ -64,6 +64,7 @@ def retry(fun):
 
 
 class DatabaseBackend(BaseBackend):
     """The database result backend."""
+
     # ResultSet.iterate should sleep this much between each pool,
     # to not bombard the database with queries.
     subpolling_interval = 0.5
     subpolling_interval = 0.5

+ 1 - 1
celery/backends/database/models.py

@@ -49,7 +49,7 @@ class Task(ResultModelBase):
 
 
 @python_2_unicode_compatible
 class TaskSet(ResultModelBase):
-    """TaskSet result"""
+    """TaskSet result."""

     __tablename__ = 'celery_tasksetmeta'
     __table_args__ = {'sqlite_autoincrement': True}

+ 1 - 0
celery/backends/database/session.py

@@ -19,6 +19,7 @@ def _after_fork_cleanup_session(session):
 
 
 
 
 class SessionManager(object):
+    """Manage SQLAlchemy sessions."""

     def __init__(self):
         self._engines = {}

+ 4 - 2
celery/backends/mongodb.py

@@ -258,8 +258,10 @@ class MongoBackend(BaseBackend):
 
 
     @cached_property
     def database(self):
-        """Get database from MongoDB connection and perform authentication
-        if necessary."""
+        """Get database from MongoDB connection.
+
+        Performs authentication if necessary.
+        """
         return self._get_database()
 
 
     @cached_property

+ 2 - 0
celery/backends/riak.py

@@ -17,6 +17,8 @@ from celery.exceptions import ImproperlyConfigured
 
 
 from .base import KeyValueStoreBackend

+__all__ = ['RiakBackend']
+
 E_BUCKET_NAME = """\
 Riak bucket names must be composed of ASCII characters only, not: {0!r}\
 """

+ 3 - 0
celery/backends/rpc.py

@@ -88,6 +88,7 @@ class ResultConsumer(BaseResultConsumer):
 
 
 
 
 class BaseRPCBackend(base.Backend, AsyncBackendMixin):
+    """Base class for the RPC result backend."""

     Exchange = Exchange
     Queue = NoCacheQueue
@@ -263,6 +264,8 @@ class BaseRPCBackend(base.Backend, AsyncBackendMixin):
 
 
 
 
 class RPCBackend(BaseRPCBackend):
+    """RPC result backend."""
+
     persistent = False

     class Consumer(Consumer):

+ 5 - 3
celery/beat.py

@@ -103,8 +103,7 @@ class ScheduleEntry(object):
         return self.schedule.now() if self.schedule else self.app.now()

     def _next_instance(self, last_run_at=None):
-        """Return a new instance of the same class, but with
-        its date and count fields updated."""
+        """Return new instance, with date and count fields updated."""
         return self.__class__(**dict(
             self,
             last_run_at=last_run_at or self._default_now(),
@@ -243,7 +242,6 @@ class Scheduler(object):
         Returns:
             float: preferred delay in seconds for next call.
         """
-
         def _when(entry, next_time_to_run):
             return (mktime(entry.schedule.now().timetuple()) +
                     (adjust(next_time_to_run) or 0))
@@ -396,6 +394,8 @@ class Scheduler(object):
 
 
 
 
 class PersistentScheduler(Scheduler):
+    """Scheduler backed by :mod:`shelve` database."""
+
     persistence = shelve
     known_suffixes = ('', '.db', '.dat', '.bak', '.dir')
 
 
@@ -498,6 +498,8 @@ class PersistentScheduler(Scheduler):
 
 
 
 
 class Service(object):
+    """Celery periodic task service."""
+
     scheduler_cls = PersistentScheduler

     def __init__(self, app, max_interval=None, schedule_filename=None,
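
For orientation, a minimal beat configuration driving the Service/PersistentScheduler pair above (the task name is hypothetical):

    app.conf.beat_schedule = {
        'add-every-30-seconds': {
            'task': 'tasks.add',   # hypothetical registered task
            'schedule': 30.0,      # seconds; a crontab() schedule also works
            'args': (2, 2),
        },
    }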

+ 4 - 1
celery/bin/amqp.py

@@ -120,6 +120,7 @@ class AMQShell(cmd.Cmd):
         silent (bool): If enabled, the commands won't have annoying
             output not relevant when running in non-shell mode.
     """
+
     conn = None
     chan = None
     prompt_fmt = '{self.counter}> '
@@ -197,7 +198,9 @@ class AMQShell(cmd.Cmd):
         say(m, file=self.out)

     def get_amqp_api_command(self, cmd, arglist):
-        """With a command name and a list of arguments, convert the arguments
+        """Get AMQP command wrapper.
+
+        With a command name and a list of arguments, convert the arguments
         to Python values and find the corresponding method on the AMQP channel
         object.
 
 

+ 11 - 4
celery/bin/base.py

@@ -55,6 +55,8 @@ find_rst_decl = re.compile(r'^\s*\.\. .+?::.+$')
 
 
 @python_2_unicode_compatible
 class Error(Exception):
+    """Exception raised by commands."""
+
     status = EX_FAILURE

     def __init__(self, reason, status=None):
@@ -67,10 +69,13 @@ class Error(Exception):
 
 
 
 
 class UsageError(Error):
+    """Exception raised for malformed arguments."""
+
     status = EX_USAGE


 class Extensions(object):
+    """Loads extensions from setuptools entrypoints."""

     def __init__(self, namespace, register):
         self.names = []
@@ -88,6 +93,7 @@ class Extensions(object):
 
 
 
 
 class HelpFormatter(IndentedHelpFormatter):
+    """Custom help formatter."""

     def format_epilog(self, epilog):
         if epilog:
@@ -183,7 +189,6 @@ class Command(object):
             self.on_usage_error = on_usage_error

     def run(self, *args, **options):
-        """This is the body of the command called by :meth:`handle_argv`."""
         raise NotImplementedError('subclass responsibility')
 
 
     def on_error(self, exc):
@@ -293,8 +298,7 @@ class Command(object):
         return default

     def handle_argv(self, prog_name, argv, command=None):
-        """Parse command-line arguments from ``argv`` and dispatch
-        to :meth:`run`.
+        """Parse arguments from argv and dispatch to :meth:`run`.

         Warning:
             Exits with an error message if :attr:`supports_args` is disabled
@@ -519,7 +523,9 @@ class Command(object):
         )

     def with_pool_option(self, argv):
-        """Return tuple of ``(short_opts, long_opts)`` if the command
+        """Return tuple of ``(short_opts, long_opts)``.
+
+        Returns only if the command
         supports a pool argument, and used to monkey patch eventlet/gevent
         environments as early as possible.
 
 
@@ -608,6 +614,7 @@ class Command(object):
 
 
 
 
 def daemon_options(parser, default_pidfile=None, default_logfile=None):
+    """Add daemon options to optparse parser."""
     group = OptionGroup(parser, 'Daemonization Options')
     group.add_option('-f', '--logfile', default=default_logfile),
     group.add_option('--pidfile', default=default_pidfile),
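
A minimal sketch of subclassing Command as described above (the command name and body are hypothetical):

    from celery.bin.base import Command

    class hello(Command):
        """Hypothetical command printing a greeting."""

        def run(self, *args, **options):
            # run() is the body dispatched to by handle_argv().
            self.out('hello world')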

+ 1 - 0
celery/bin/beat.py

@@ -88,6 +88,7 @@ class beat(Command):
     The last example requires the :pypi:`django-celery-beat` extension
     package found on PyPI.
     """
+
     doc = __doc__
     enable_config_from_cmdline = True
     supports_args = False

+ 5 - 2
celery/bin/celery.py

@@ -320,6 +320,7 @@ def determine_exit_status(ret):
 
 
 
 
 def main(argv=None):
+    """Start celery umbrella command."""
     # Fix for setuptools generated scripts, so that it will
     # work with multiprocessing fork emulation.
     # (see multiprocessing.forking.get_preparation_data())
@@ -337,6 +338,7 @@ def main(argv=None):
 
 
 class multi(Command):
     """Start multiple worker instances."""
+
     respects_app_option = False

     def get_options(self):
@@ -1030,6 +1032,7 @@ class report(Command):
 
 
 
 
 class CeleryCommand(Command):
+    """Base class for commands."""

     commands = {
         'amqp': amqp,
@@ -1202,8 +1205,8 @@ class CeleryCommand(Command):
 
 
 
 
 def command(*args, **kwargs):
-    """Deprecated: Use classmethod :meth:`CeleryCommand.register_command`
-    instead."""
+    # Deprecated: Use classmethod
+    #             :meth:`CeleryCommand.register_command` instead.
     _register = CeleryCommand.register_command
     return _register(args[0]) if args else _register
 
 

+ 3 - 0
celery/bin/celeryd_detach.py

@@ -29,6 +29,7 @@ C_FAKEFORK = os.environ.get('C_FAKEFORK')
 def detach(path, argv, logfile=None, pidfile=None, uid=None,
            gid=None, umask=None, workdir=None, fake=False, app=None,
            executable=None, hostname=None):
+    """Detach program by argv."""
     hostname = default_nodename(hostname)
     logfile = node_format(logfile, hostname)
     pidfile = node_format(pidfile, hostname)
@@ -107,6 +108,8 @@ class PartialOptionParser(OptionParser):
 
 
 
 
 class detached_celeryd(object):
+    """Daemonize the celery worker process."""
+
     usage = '%prog [options] [celeryd options]'
     version = celery.VERSION_BANNER
     description = ('Detaches Celery worker nodes.  See `celery worker --help` '

+ 1 - 0
celery/bin/events.py

@@ -98,6 +98,7 @@ class events(Command):
             $ celery events -d
             $ celery events -c mod.attr -F 1.0 --detach --maxrate=100/m -l info
     """
+
     doc = __doc__
     supports_args = False
 
 

+ 2 - 0
celery/bin/graph.py

@@ -16,6 +16,8 @@ __all__ = ['graph']
 
 
 
 
 class graph(Command):
+    """The ``celery graph`` command."""
+
     args = """<TYPE> [arguments]
             .....  bootsteps [worker] [consumer]
             .....  workers   [enumerate]

+ 2 - 0
celery/bin/logtool.py

@@ -119,6 +119,8 @@ class Audit(object):
 
 
 
 
 class logtool(Command):
+    """The ``celery logtool`` command."""
+
     args = """<action> [arguments]
             .....  stats      [file1|- [file2 [...]]]
             .....  traces     [file1|- [file2 [...]]]

+ 2 - 0
celery/bin/multi.py

@@ -220,6 +220,8 @@ class TermLogger(object):
 
 
 
 
 class MultiTool(TermLogger):
+    """The ``celery multi`` program."""
+
     MultiParser = MultiParser
     OptionParser = NamespacedOptionParser
 
 

+ 2 - 0
celery/bin/worker.py

@@ -197,6 +197,7 @@ class worker(Command):
             $ celery worker -A proj --concurrency=4
             $ celery worker -A proj --concurrency=1000 -P eventlet
     """
+
     doc = __MODULE_DOC__  # parse help from this too
     namespace = 'worker'
     enable_config_from_cmdline = True
@@ -345,6 +346,7 @@ class worker(Command):
 
 
 
 
 def main(app=None):
+    """Start worker."""
     # Fix for setuptools generated scripts, so that it will
     # work with multiprocessing fork emulation.
     # (see multiprocessing.forking.get_preparation_data())

+ 9 - 2
celery/bootsteps.py

@@ -86,6 +86,7 @@ class Blueprint(object):
         on_stopped (Callable): Optional callback applied after
             blueprint stopped.
     """
+
     GraphFormatter = StepFormatter

     name = None
@@ -325,8 +326,11 @@ class Step(object):
         pass

     def include_if(self, parent):
-        """An optional predicate that decides whether this
-        step should be created."""
+        """Return true if bootstep should be included.
+
+        You can define this as an optional predicate that decides whether
+        this step should be created.
+        """
         return self.enabled

     def instantiate(self, name, *args, **kwargs):
@@ -356,6 +360,7 @@ class Step(object):
 
 
 
 
 class StartStopStep(Step):
+    """Bootstep that must be started and stopped in order."""

     #: Optional obj created by the :meth:`create` method.
     #: This is used by :class:`StartStopStep` to keep the
@@ -386,6 +391,8 @@ class StartStopStep(Step):
 
 
 
 
 class ConsumerStep(StartStopStep):
+    """Bootstep that starts a message consumer."""
+
     requires = ('celery.worker.consumer:Connection',)
     consumers = None
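
A sketch of wiring a custom StartStopStep into the worker blueprint (the step body is hypothetical):

    from celery import Celery, bootsteps

    class ExampleStep(bootsteps.StartStopStep):
        """Hypothetical step: started and stopped with the worker."""
        requires = {'celery.worker.components:Pool'}

        def start(self, worker):
            print('step started')

        def stop(self, worker):
            print('step stopped')

    app = Celery('proj')
    app.steps['worker'].add(ExampleStep)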
 
 

+ 37 - 13
celery/canvas.py

@@ -121,7 +121,9 @@ def _upgrade(fields, sig):
 @abstract.CallableSignature.register
 @python_2_unicode_compatible
 class Signature(dict):
-    """Class that wraps the arguments and execution options
+    """Task Signature.
+
+    Class that wraps the arguments and execution options
     for a single task invocation.

     Used as the parts in a :class:`group` and other constructs,
@@ -226,8 +228,11 @@ class Signature(dict):
         return self.apply_async(partial_args, partial_kwargs)

     def apply(self, args=(), kwargs={}, **options):
-        """Same as :meth:`apply_async` but executed the task inline instead
-        of sending a task message."""
+        """Call task locally.
+
+        Same as :meth:`apply_async` but executes the task inline instead
+        of sending a task message.
+        """
         # For callbacks: extra args are prepended to the stored args.
         args, kwargs, options = self._merge(args, kwargs, options)
         return self.type.apply(args, kwargs, **options)
@@ -321,8 +326,10 @@ class Signature(dict):
 
 
     def replace(self, args=None, kwargs=None, options=None):
         """Replace the args, kwargs or options set for this signature.
+
         These are only replaced if the argument for the section is
-        not :const:`None`."""
+        not :const:`None`.
+        """
         s = self.clone()
         if args is not None:
             s.args = args
@@ -367,8 +374,7 @@ class Signature(dict):
         items.extend(maybe_list(value))

     def link(self, callback):
-        """Add a callback task to be applied if this task
-        executes successfully.
+        """Add callback task to be applied if this task succeeds.

         Returns:
             Signature: the argument passed, for chaining
@@ -377,8 +383,7 @@ class Signature(dict):
         return self.append_to_list_option('link', callback)

     def link_error(self, errback):
-        """Add a callback task to be applied if an error occurs
-        while executing this task.
+        """Add callback task to be applied on error in task execution.

         Returns:
             Signature: the argument passed, for chaining
@@ -400,8 +405,10 @@ class Signature(dict):
         return self

     def flatten_links(self):
-        """Return a recursive list of dependencies (unchain if you will,
-        but with links intact)."""
+        """Return a recursive list of dependencies.
+
+        "unchain" if you will, but with links intact.
+        """
         return list(_chain.from_iterable(_chain(
             [[self]],
             (link.flatten_links()
@@ -507,7 +514,9 @@ class Signature(dict):
 @Signature.register_type
 @python_2_unicode_compatible
 class chain(Signature):
-    """Chains tasks together, so that each tasks follows each other
+    """Chain tasks together.
+
+    Each task follows one another,
     by being applied as a callback of the previous task.

     Note:
@@ -551,6 +560,7 @@ class chain(Signature):
             task in the chain.  When that task succeeds the next task in the
             chain is applied, and so on.
     """
+
     tasks = _getitem_property('kwargs.tasks', 'Tasks in chain.')
 
 
     def __init__(self, *tasks, **options):
@@ -772,6 +782,13 @@ class _basemap(Signature):
 @Signature.register_type
 @python_2_unicode_compatible
 class xmap(_basemap):
+    """Map operation for tasks.
+
+    Note:
+        Tasks are executed sequentially in process; this is not a
+        parallel operation like :class:`group`.
+    """
+
     _task_name = 'celery.map'

     def __repr__(self):
@@ -783,6 +800,8 @@ class xmap(_basemap):
 @Signature.register_type
 @python_2_unicode_compatible
 class xstarmap(_basemap):
+    """Map operation for tasks, using star arguments."""
+
     _task_name = 'celery.starmap'
 
 
     def __repr__(self):
@@ -793,6 +812,8 @@ class xstarmap(_basemap):
 
 
 @Signature.register_type
 class chunks(Signature):
+    """Partition of tasks in n chunks."""
+
     _unpack_args = itemgetter('task', 'it', 'n')
 
 
     def __init__(self, task, it, n, **options):
@@ -874,6 +895,7 @@ class group(Signature):
             tasks in the group (and return a :class:`GroupResult` instance
             that can be used to inspect the state of the group).
     """
+
     tasks = _getitem_property('kwargs.tasks', 'Tasks in group.')
 
 
     def __init__(self, *tasks, **options):
@@ -1072,7 +1094,7 @@ class group(Signature):
 @Signature.register_type
 @python_2_unicode_compatible
 class chord(Signature):
-    """Barrier synchronization primitive.
+    r"""Barrier synchronization primitive.

     A chord consists of a header and a body.
 
 
@@ -1097,6 +1119,7 @@ class chord(Signature):
             >>> res.get()
             12
     """
+
     def __init__(self, header, body=None, task='celery.chord',
                  args=(), kwargs={}, app=None, **options):
         Signature.__init__(
@@ -1247,7 +1270,7 @@ class chord(Signature):
 
 
 
 
 def signature(varies, *args, **kwargs):
-    """Create new signature
+    """Create new signature.

     - if the first argument is a signature already then it's cloned.
     - if the first argument is a dict, then a Signature version is returned.
@@ -1265,6 +1288,7 @@ subtask = signature   # XXX compat
 
 
 
 
 def maybe_signature(d, app=None):
+    """Ensure obj is a signature, or None."""
     if d is not None:
         if (isinstance(d, dict) and
                 not isinstance(d, abstract.CallableSignature)):
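
To see how these primitives compose, a short sketch where add and tsum stand for hypothetical registered tasks:

    from celery import chain, chord, signature

    # signature() wraps a task name plus arguments and options:
    sig = signature('tasks.add', args=(2, 2), countdown=10)

    # chain: each signature is applied as a callback of the previous one.
    res = chain(add.s(2, 2), add.s(4), add.s(8))()   # ((2 + 2) + 4) + 8

    # chord: the body (tsum) is applied once all header tasks complete.
    res = chord([add.s(i, i) for i in range(10)])(tsum.s())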

+ 1 - 0
celery/concurrency/__init__.py

@@ -19,4 +19,5 @@ ALIASES = {
 
 
 
 
 def get_implementation(cls):
+    """Return pool implementation by name."""
     return symbol_by_name(cls, ALIASES)
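
Usage sketch: aliases and full 'module:attribute' paths both resolve through symbol_by_name():

    from celery.concurrency import get_implementation

    pool_cls = get_implementation('solo')  # alias defined in ALIASES
    pool_cls = get_implementation('celery.concurrency.solo:TaskPool')  # equivalent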

+ 29 - 31
celery/concurrency/asynpool.py

@@ -73,6 +73,7 @@ except (ImportError, NameError):  # pragma: no cover
     def unpack_from(fmt, iobuf, unpack=struct.unpack):  # noqa
         return unpack(fmt, iobuf.getvalue())  # <-- BytesIO

+__all__ = ['AsynPool']

 logger = get_logger(__name__)
 error, debug = logger.error, logger.debug
@@ -100,6 +101,7 @@ Ack = namedtuple('Ack', ('id', 'fd', 'payload'))
 
 
 
 
 def gen_not_started(gen):
+    """Return true if generator is not started."""
     # gi_frame is None when generator stopped.
     return gen.gi_frame and gen.gi_frame.f_lasti == -1
 
 
@@ -150,8 +152,7 @@ else:
 
 
 def _select(readers=None, writers=None, err=None, timeout=0,
             poll=_select_imp):
-    """Simple wrapper to :class:`~select.select`, using :`~select.poll`
-    as the implementation.
+    """Simple wrapper to :class:`~select.select`, using :`~select.poll`.

     Arguments:
         readers (Set[Fd]): Set of reader fds to test if readable.
@@ -271,8 +272,7 @@ class ResultHandler(_pool.ResultHandler):
             callback(message)

     def _make_process_result(self, hub):
-        """Coroutine that reads messages from the pool processes
-        and calls the appropriate handler."""
+        """Coroutine reading messages from the pool processes."""
         fileno_to_outq = self.fileno_to_outq
         on_state_change = self.on_state_change
         add_reader = hub.add_reader
@@ -302,8 +302,7 @@ class ResultHandler(_pool.ResultHandler):
         raise RuntimeError('Not registered with event loop')

     def on_stop_not_started(self):
-        """This method is always used to stop when the helper thread is not
-        started."""
+        # This is always used, since we do not start any threads.
         cache = self.cache
         check_timeouts = self.check_timeouts
         fileno_to_outq = self.fileno_to_outq
@@ -362,7 +361,8 @@ class ResultHandler(_pool.ResultHandler):
 
 
 
 
 class AsynPool(_pool.Pool):
-    """Pool version that uses AIO instead of helper threads."""
+    """AsyncIO Pool (no threads)."""
+
     ResultHandler = ResultHandler
     Worker = Worker
 
 
@@ -456,7 +456,7 @@ class AsynPool(_pool.Pool):
             os.close(fd)

     def register_with_event_loop(self, hub):
-        """Registers the async pool with the current event loop."""
+        """Register the async pool with the current event loop."""
         self._result_handler.register_with_event_loop(hub)
         self.handle_result_event = self._result_handler.handle_event
         self._create_timelimit_handlers(hub)
@@ -478,8 +478,7 @@ class AsynPool(_pool.Pool):
         hub.on_tick.add(self.on_poll_start)

     def _create_timelimit_handlers(self, hub, now=time.time):
-        """For async pool this sets up the handlers used
-        to implement time limits."""
+        """Create handlers used to implement time limits."""
         call_later = hub.call_later
         trefs = self._tref_for_id = WeakValueDictionary()
 
 
@@ -540,8 +539,7 @@ class AsynPool(_pool.Pool):
         self._mark_worker_as_available(inqW_fd)

     def _create_process_handlers(self, hub, READ=READ, ERR=ERR):
-        """For async pool this will create the handlers called
-        when a process is up/down and etc."""
+        """Create handlers called on process up/down, etc."""
         add_reader, remove_reader, remove_writer = (
             hub.add_reader, hub.remove_reader, hub.remove_writer,
         )
@@ -649,8 +647,7 @@ class AsynPool(_pool.Pool):
     def _create_write_handlers(self, hub,
                                pack=struct.pack, dumps=_pickle.dumps,
                                protocol=HIGHEST_PROTOCOL):
-        """For async pool this creates the handlers used to write data to
-        child processes."""
+        """Create handlers used to write data to child processes."""
         fileno_to_inq = self._fileno_to_inq
         fileno_to_synq = self._fileno_to_synq
         outbound = self.outbound_buffer
@@ -1028,8 +1025,7 @@ class AsynPool(_pool.Pool):
         pass

     def create_process_queues(self):
-        """Creates new in, out (and optionally syn) queues,
-        returned as a tuple."""
+        """Create new in, out, etc. queues, returned as a tuple."""
         # NOTE: Pipes must be set O_NONBLOCK at creation time (the original
         # fd), otherwise it won't be possible to change the flags until
         # there's an actual reader/writer on the other side.
@@ -1047,8 +1043,7 @@ class AsynPool(_pool.Pool):
         return inq, outq, synq

     def on_process_alive(self, pid):
-        """Handler called when the :const:`WORKER_UP` message is received
-        from a child process.
+        """Called when receiving the :const:`WORKER_UP` message.

         Marks the process as ready to receive work.
         """
@@ -1064,8 +1059,7 @@ class AsynPool(_pool.Pool):
         self._all_inqueues.add(proc.inqW_fd)

     def on_job_process_down(self, job, pid_gone):
-        """Handler called for each job when the process it was assigned to
-        exits."""
+        """Called for each job when the process assigned to it exits."""
         if job._write_to and not job._write_to._is_alive():
             # job was partially written
             self.on_partial_read(job, job._write_to)
@@ -1075,9 +1069,12 @@ class AsynPool(_pool.Pool):
             self._put_back(job)

     def on_job_process_lost(self, job, pid, exitcode):
-        """Handler called for each *started* job when the process it
+        """Called when the process executing a job exits.
+
+        This happens when the process the job
         was assigned to exited by mysterious means (error exitcodes and
-        signals)"""
+        signals).
+        """
         self.mark_as_worker_lost(job, exitcode)
 
 
     def human_write_stats(self):
@@ -1102,8 +1099,7 @@ class AsynPool(_pool.Pool):
         }

     def _process_cleanup_queues(self, proc):
-        """Handler called to clean up a processes queues after process
-        exit."""
+        """Called to clean up queues after process exit."""
         if not proc.dead:
             try:
                 self._queues[self._find_worker_queues(proc)] = None
@@ -1132,8 +1128,7 @@ class AsynPool(_pool.Pool):
         )

     def _process_register_queues(self, proc, queues):
-        """Marks new ownership for ``queues`` so that the fileno indices are
-        updated."""
+        """Mark new ownership for ``queues`` to update fileno indices."""
         assert queues in self._queues
         b = len(self._queues)
         self._queues[queues] = proc
@@ -1157,7 +1152,9 @@ class AsynPool(_pool.Pool):
             self._quick_put = self._quick_get = self._poll_result = None

     def process_flush_queues(self, proc):
-        """Flushes all queues, including the outbound buffer, so that
+        """Flush all queues.
+
+        Including the outbound buffer, so that
         all tasks that haven't been started will be discarded.

         In Celery this is called whenever the transport connection is lost
@@ -1191,8 +1188,7 @@ class AsynPool(_pool.Pool):
                 break

     def on_partial_read(self, job, proc):
-        """Called when a job was only partially written to a child process
-        and it exited."""
+        """Called when a job was partially written to an exited child."""
         # worker terminated by signal:
         # we cannot reuse the sockets again, because we don't know if
         # the process wrote/read anything from them, and if so we cannot
@@ -1218,8 +1214,10 @@ class AsynPool(_pool.Pool):
             assert len(self._queues) == before

     def destroy_queues(self, queues, proc):
-        """Destroy queues that can no longer be used, so that they
-        be replaced by new sockets."""
+        """Destroy queues that can no longer be used.
+
+        This way they can be replaced by new usable sockets.
+        """
         assert not proc._is_alive()
         self._waiting_to_start.discard(proc)
         removed = 1
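
To make gen_not_started() above concrete, a small illustration of the CPython frame attributes it relies on:

    def coro():
        yield 1

    g = coro()
    assert g.gi_frame.f_lasti == -1   # never advanced: "not started"
    next(g)
    assert g.gi_frame.f_lasti != -1   # started
    g.close()
    assert g.gi_frame is None         # stopped: gi_frame is gone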

+ 4 - 1
celery/concurrency/base.py

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-"""Base Execution Pool"""
+"""Base Execution Pool."""
 from __future__ import absolute_import, unicode_literals

 import logging
@@ -24,6 +24,7 @@ logger = get_logger('celery.pool')
 def apply_target(target, args=(), kwargs={}, callback=None,
                  accept_callback=None, pid=None, getpid=os.getpid,
                  propagate=(), monotonic=monotonic, **_):
+    """Apply function within pool context."""
     if accept_callback:
         accept_callback(pid or getpid(), monotonic())
     try:
@@ -45,6 +46,8 @@ def apply_target(target, args=(), kwargs={}, callback=None,
 
 
 
 
 class BasePool(object):
+    """Task pool."""
+
     RUN = 0x1
     CLOSE = 0x2
     TERMINATE = 0x3

+ 3 - 0
celery/concurrency/eventlet.py

@@ -38,6 +38,7 @@ def apply_target(target, args=(), kwargs={}, callback=None,
 
 
 
 
 class Timer(_timer.Timer):
+    """Eventlet Timer."""

     def __init__(self, *args, **kwargs):
         from eventlet.greenthread import spawn_after
@@ -89,6 +90,8 @@ class Timer(_timer.Timer):
 
 
 
 
 class TaskPool(base.BasePool):
+    """Eventlet Task Pool."""
+
     Timer = Timer

     signal_safe = False

+ 2 - 0
celery/concurrency/gevent.py

@@ -73,6 +73,8 @@ class Timer(_timer.Timer):
 
 
 
 
 class TaskPool(BasePool):
+    """GEvent Pool."""
+
     Timer = Timer

     signal_safe = False

+ 2 - 1
celery/concurrency/prefork.py

@@ -82,7 +82,7 @@ def process_initializer(app, hostname):
 
 
 
 
 def process_destructor(pid, exitcode):
-    """Pool child process destructor
+    """Pool child process destructor.

     Dispatch the :signal:`worker_process_shutdown` signal.
     """
@@ -93,6 +93,7 @@ def process_destructor(pid, exitcode):
 
 
 class TaskPool(BasePool):
     """Multiprocessing Pool implementation."""
+
     Pool = AsynPool
     BlockingPool = BlockingPool
 
 

+ 1 - 0
celery/concurrency/solo.py

@@ -11,6 +11,7 @@ __all__ = ['TaskPool']
 
 
 class TaskPool(BasePool):
     """Solo task pool (blocking, inline, fast)."""
+
     body_can_be_buffer = True

     def __init__(self, *args, **kwargs):

+ 7 - 2
celery/contrib/abortable.py

@@ -135,13 +135,16 @@ class AbortableAsyncResult(AsyncResult):
 
 
 
 
 class AbortableTask(Task):
-    """A celery task that serves as a base class for all :class:`Task`'s
+    """Task that can be aborted.
+
+    This serves as a base class for all :class:`Task`'s
     that support aborting during execution.

     All subclasses of :class:`AbortableTask` must call the
     :meth:`is_aborted` method periodically and act accordingly when
     the call evaluates to :const:`True`.
     """
+
     abstract = True

     def AsyncResult(self, task_id):
@@ -149,7 +152,9 @@ class AbortableTask(Task):
         return AbortableAsyncResult(task_id, backend=self.backend)

     def is_aborted(self, **kwargs):
-        """Checks against the backend whether this
+        """Return true if task is aborted.
+
+        Checks against the backend whether this
         :class:`AbortableAsyncResult` is :const:`ABORTED`.

         Always return :const:`False` in case the `task_id` parameter
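
A usage sketch of the abortable-task contract described above (the task body and helpers are hypothetical):

    from celery.contrib.abortable import AbortableTask

    @app.task(bind=True, base=AbortableTask)
    def long_running(self):
        while have_more_work():        # hypothetical helper
            if self.is_aborted():      # poll the backend periodically
                return                 # respect the abort request
            do_some_work()             # hypothetical helper

    result = long_running.delay()
    result.abort()   # marks the task ABORTED in the result backend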

+ 15 - 6
celery/contrib/migrate.py

@@ -29,11 +29,13 @@ Moving task {state.filtered}/{state.strtotal}: \
 
 
 
 
 class StopFiltering(Exception):
-    pass
+    """Semi-predicate used to signal filter stop."""


 @python_2_unicode_compatible
 class State(object):
+    """Migration progress state."""
+
     count = 0
     filtered = 0
     total_apx = 0
@@ -55,6 +57,7 @@ def republish(producer, message, exchange=None, routing_key=None,
                             'content_type',
                             'content_encoding',
                             'headers']):
+    """Republish message."""
     body = ensure_bytes(message.body)  # use raw message body.
     info, headers, props = (message.delivery_info,
                             message.headers, message.properties)
@@ -75,6 +78,7 @@ def republish(producer, message, exchange=None, routing_key=None,
 
 
 
 
 def migrate_task(producer, body_, message, queues=None):
+    """Migrate single task message."""
     info = message.delivery_info
     queues = {} if queues is None else queues
     republish(producer, message,
@@ -94,6 +98,7 @@ def filter_callback(callback, tasks):
 
 
 def migrate_tasks(source, dest, migrate=migrate_task, app=None,
                   queues=None, **kwargs):
+    """Migrate tasks from one broker to another."""
     app = app_or_default(app)
     queues = prepare_queues(queues)
     producer = app.amqp.Producer(dest)
@@ -219,10 +224,12 @@ def expand_dest(ret, exchange, routing_key):
 
 
 
 
 def task_id_eq(task_id, body, message):
+    """Return true if task id equals task_id."""
     return body['id'] == task_id


 def task_id_in(ids, body, message):
+    """Return true if task id is a member of the set ids."""
     return body['id'] in ids
 
 
 
 
@@ -241,6 +248,7 @@ def start_filter(app, conn, filter, limit=None, timeout=1.0,
                 ack_messages=False, tasks=None, queues=None,
                 callback=None, forever=False, on_declare_queue=None,
                 consume_from=None, state=None, accept=None, **kwargs):
+    """Filter tasks."""
     state = state or State()
     queues = prepare_queues(queues)
     consume_from = [_maybe_queue(app, q)
@@ -314,8 +322,9 @@ def move_task_by_id(task_id, dest, **kwargs):
 
 
 
 
 def move_by_idmap(map, **kwargs):
-    """Moves tasks by matching from a ``task_id: queue`` mapping,
-    where ``queue`` is a queue to move the task to.
+    """Move tasks by matching from a ``task_id: queue`` mapping.
+
+    Where ``queue`` is a queue to move the task to.

     Example:
         >>> move_by_idmap({
@@ -333,8 +342,9 @@ def move_by_idmap(map, **kwargs):
 
 
 
 
 def move_by_taskmap(map, **kwargs):
-    """Moves tasks by matching from a ``task_name: queue`` mapping,
-    where ``queue`` is the queue to move the task to.
+    """Move tasks by matching from a ``task_name: queue`` mapping.
+
+    ``queue`` is the queue to move the task to.

     Example:
         >>> move_by_taskmap({
@@ -342,7 +352,6 @@ def move_by_taskmap(map, **kwargs):
         ...     'tasks.mul': Queue('name'),
         ... })
     """
-
     def task_name_in_map(body, message):
         return map.get(body['task'])  # <- name of task
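
Putting these helpers together, a sketch of moving pending tasks from one broker to another (the URLs are hypothetical):

    from kombu import Connection
    from celery.contrib.migrate import migrate_tasks

    migrate_tasks(Connection('amqp://guest@old-broker//'),
                  Connection('amqp://guest@new-broker//'),
                  app=app)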
 
 

+ 3 - 2
celery/contrib/rdb.py

@@ -88,6 +88,8 @@ SESSION_ENDED = '{self.ident}: Session with {self.remote_addr} ended.'
 
 
 
 
 class Rdb(Pdb):
+    """Remote debugger."""
+
     me = 'Remote Debugger'
     _prev_outs = None
     _sock = None
@@ -177,8 +179,7 @@ class Rdb(Pdb):
 
 
 
 
 def debugger():
-    """Return the current debugger instance (if any),
-    or creates a new one."""
+    """Return the current debugger instance, or create if none."""
     rdb = _current[0]
     if rdb is None or not rdb.active:
         rdb = _current[0] = Rdb()
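
The typical entry point for the remote debugger above; a sketch assuming the documented base port of 6899:

    from celery.contrib import rdb

    @app.task
    def add(x, y):
        rdb.set_trace()   # blocks until a client telnets in
        return x + y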

+ 4 - 0
celery/contrib/sphinx.py

@@ -40,6 +40,8 @@ from celery.five import getfullargspec
 
 
 
 
 class TaskDocumenter(FunctionDocumenter):
+    """Document task definitions."""
+
     objtype = 'task'
     member_order = 11
 
 
@@ -61,12 +63,14 @@ class TaskDocumenter(FunctionDocumenter):
 
 
 
 
 class TaskDirective(PyModulelevel):
+    """Sphinx task directive."""

     def get_signature_prefix(self, sig):
         return self.env.config.celery_task_prefix


 def setup(app):
+    """Set up Sphinx extension."""
     app.add_autodocumenter(TaskDocumenter)
     app.domains['py'].directives['task'] = TaskDirective
     app.add_config_value('celery_task_prefix', '(task)', True)
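
The extension is enabled from a Sphinx conf.py; a minimal sketch:

    # docs/conf.py
    extensions = ['celery.contrib.sphinx']

    # Optional: prefix shown for documented tasks ('(task)' is the default).
    celery_task_prefix = '(task)'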

+ 7 - 6
celery/events/__init__.py

@@ -100,6 +100,7 @@ class EventDispatcher(object):
     Note:
         You need to :meth:`close` this after use.
     """
+
     DISABLED_TRANSPORTS = {'sql'}

     app = None
@@ -168,8 +169,7 @@ class EventDispatcher(object):
 
 
     def publish(self, type, fields, producer,
                 blind=False, Event=Event, **kwargs):
-        """Publish event using a custom :class:`~kombu.Producer`
-        instance.
+        """Publish event using custom :class:`~kombu.Producer`.

         Arguments:
             type (str): Event type name, with group separated by dash (`-`).
@@ -251,7 +251,7 @@ class EventDispatcher(object):
                                     retry_policy=retry_policy)

     def flush(self, errors=True, groups=True):
-        """Flushes the outbound buffer."""
+        """Flush the outbound buffer."""
         if errors:
             buf = list(self._outbound_buffer)
             try:
@@ -267,7 +267,7 @@ class EventDispatcher(object):
                     events[:] = []  # list.clear

     def extend_buffer(self, other):
-        """Copies the outbound buffer of another instance."""
+        """Copy the outbound buffer of another instance."""
         self._outbound_buffer.extend(other._outbound_buffer)

     def close(self):
@@ -293,6 +293,7 @@ class EventReceiver(ConsumerMixin):
             The special handler `"*"` captures all events that don't have a
             handler.
     """
+
     app = None

     def __init__(self, channel, handlers=None, routing_key='#',
@@ -334,8 +335,7 @@ class EventReceiver(ConsumerMixin):
         })

     def process(self, type, event):
-        """Process the received event by dispatching it to the appropriate
-        handler."""
+        """Process event by dispatching to configured handler."""
         handler = self.handlers.get(type) or self.handlers.get('*')
         handler and handler(event)
 
 
@@ -406,6 +406,7 @@ class EventReceiver(ConsumerMixin):
 
 
 
 
 class Events(object):
+    """Implements app.events."""

     def __init__(self, app=None):
         self.app = app
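
Consuming events through the app.events API above, a sketch using the catch-all '*' handler:

    def on_event(event):
        print(event['type'], event)

    with app.connection() as connection:
        recv = app.events.Receiver(connection, handlers={'*': on_event})
        recv.capture(limit=None, timeout=None)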

+ 3 - 0
celery/events/cursesmon.py

@@ -37,6 +37,8 @@ events: {s.event_count} tasks:{s.task_count} workers:{w_alive}/{w_all}
 
 
 
 
 class CursesMonitor(object):  # pragma: no cover
+    """A curses based Celery task monitor."""
+
     keymap = {}
     win = None
     screen_width = None
@@ -517,6 +519,7 @@ def capture_events(app, state, display):  # pragma: no cover
 
 
 
 
 def evtop(app=None):  # pragma: no cover
+    """Start curses monitor."""
     app = app_or_default(app)
     state = app.events.State()
     display = CursesMonitor(state, app)

+ 2 - 0
celery/events/dumper.py

@@ -38,6 +38,7 @@ def humanize_type(type):
 
 
 
 
 class Dumper(object):
+    """Monitor events."""

     def __init__(self, out=sys.stdout):
         self.out = out
@@ -84,6 +85,7 @@ class Dumper(object):
 
 
 
 
 def evdump(app=None, out=sys.stdout):
+    """Start event dump."""
     app = app_or_default(app)
     dumper = Dumper(out=out)
     dumper.say('-> evdump: starting capture...')

+ 2 - 0
celery/events/snapshot.py

@@ -25,6 +25,7 @@ logger = get_logger('celery.evcam')


 class Polaroid(object):
+    """Record event snapshots."""

     timer = None
     shutter_signal = Signal(providing_args=('state',))
@@ -87,6 +88,7 @@ class Polaroid(object):

 def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
           logfile=None, pidfile=None, timer=None, app=None):
+    """Start snapshot recorder."""
     app = app_or_default(app)

     if pidfile:

+ 9 - 2
celery/events/state.py

@@ -109,6 +109,7 @@ def _warn_drift(hostname, drift, local_received, timestamp):
 def heartbeat_expires(timestamp, freq=60,
                       expire_window=HEARTBEAT_EXPIRE_WINDOW,
                       Decimal=Decimal, float=float, isinstance=isinstance):
+    """Return time when heartbeat expires."""
     # some json implementations returns decimal.Decimal objects,
     # which aren't compatible with float.
     freq = float(freq) if isinstance(freq, Decimal) else freq
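
With the docstring in place, the arithmetic is worth pinning down with a number; a sketch, assuming the default expire window of 200% (i.e. two missed heartbeats):

    # a worker heartbeating every 60s whose last beat arrived at t=1000.0
    # would be treated as offline once 1000.0 + 60 * 2.0 = 1120.0 passes
    heartbeat_expires(1000.0, freq=60)  # -> 1120.0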
@@ -148,6 +149,7 @@ def with_unique_field(attr):
 @python_2_unicode_compatible
 class Worker(object):
     """Worker State."""
+
     heartbeat_max = 4
     expire_window = HEARTBEAT_EXPIRE_WINDOW

@@ -241,6 +243,7 @@ class Worker(object):
 @python_2_unicode_compatible
 class Task(object):
     """Task State."""
+
     name = received = sent = started = succeeded = failed = retried = \
         revoked = rejected = args = kwargs = eta = expires = retries = \
         worker = result = exception = timestamp = runtime = traceback = \
@@ -376,6 +379,7 @@ class Task(object):

 class State(object):
     """Records clusters state."""
+
     Worker = Worker
     Task = Task
     event_count = 0
@@ -631,8 +635,11 @@ class State(object):
                 break

     def tasks_by_time(self, limit=None, reverse=True):
-        """Generator giving tasks ordered by time,
-        in ``(uuid, Task)`` tuples."""
+        """Generator yielding tasks ordered by time.
+
+        Yields:
+            Tuples of ``(uuid, Task)``.
+        """
         _heap = self._taskheap
         if reverse:
             _heap = reversed(_heap)
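
Monitor code typically consumes the generator exactly as the new Yields section describes; a sketch, assuming `app` is an already-configured Celery app:

    state = app.events.State()
    for uuid, task in state.tasks_by_time(limit=10):
        print(uuid, task.name, task.state)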

+ 8 - 12
celery/exceptions.py

@@ -29,23 +29,19 @@ Task of kind {0} never registered, please make sure it's imported.\


 class CeleryError(Exception):
-    pass
+    """Base class for all Celery errors."""


 class CeleryWarning(UserWarning):
-    pass
+    """Base class for all Celery warnings."""


 class SecurityError(CeleryError):
-    """Security related exceptions.
-
-    Handle with care.
-    """
-    pass
+    """Security related exception."""


 class TaskPredicate(CeleryError):
-    pass
+    """Base class for task-related semi-predicates."""


 @python_2_unicode_compatible
@@ -152,7 +148,7 @@ class NotConfigured(CeleryWarning):


 class AlwaysEagerIgnored(CeleryWarning):
-    """send_task ignores :setting:`task_always_eager` option"""
+    """send_task ignores :setting:`task_always_eager` option."""


 class InvalidTaskError(CeleryError):
@@ -168,15 +164,15 @@ class ChordError(CeleryError):


 class CPendingDeprecationWarning(PendingDeprecationWarning):
-    pass
+    """Warning of pending deprecation."""


 class CDeprecationWarning(DeprecationWarning):
-    pass
+    """Warning of deprecation."""


 class FixupWarning(CeleryWarning):
-    pass
+    """Fixup related warning."""


 class DuplicateNodenameWarning(CeleryWarning):
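
These one-line docstrings double as a map of the hierarchy; a quick sanity check of what they promise (a sketch, not part of the commit):

    from celery.exceptions import CeleryError, SecurityError, TaskPredicate

    assert issubclass(SecurityError, CeleryError)
    assert issubclass(TaskPredicate, CeleryError)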

+ 2 - 222
celery/five.py

@@ -2,227 +2,7 @@
 """Python 2/3 compatibility utilities."""
 from __future__ import absolute_import, unicode_literals

-import importlib
+import operator
 import sys

-from importlib import import_module
-from types import ModuleType
-
-# extends vine.five
-from vine import five
-from vine.five import *  # noqa
-from vine.five import __all__ as _all_five
-
-# bloody flake8
-items = five.items
-bytes_if_py2 = five.bytes_if_py2
-string_t = five.string_t
-
-try:
-    from functools import reduce
-except ImportError:
-    pass
-
-__all__ = [
-    'class_property', 'reclassmethod', 'create_module', 'recreate_module',
-]
-__all__ += _all_five
-
-#  ############# Module Generation ##########################
-
-# Utilities to dynamically
-# recreate modules, either for lazy loading or
-# to create old modules at runtime instead of
-# having them litter the source tree.
-
-# import fails in python 2.5. fallback to reduce in stdlib
-
-MODULE_DEPRECATED = """
-The module %s is deprecated and will be removed in a future version.
-"""
-
-DEFAULT_ATTRS = {'__file__', '__path__', '__doc__', '__all__'}
-
-# im_func is no longer available in Py3.
-# instead the unbound method itself can be used.
-if sys.version_info[0] == 3:  # pragma: no cover
-    def fun_of_method(method):
-        return method
-else:
-    def fun_of_method(method):  # noqa
-        return method.im_func
-
-
-def getappattr(path):
-    """Gets attribute from the current_app recursively,
-    (e.g., ``getappattr('amqp.get_task_consumer')``."""
-    from celery import current_app
-    return current_app._rgetattr(path)
-
-
-def _compat_periodic_task_decorator(*args, **kwargs):
-    from celery.task import periodic_task
-    return periodic_task(*args, **kwargs)
-
-COMPAT_MODULES = {
-    'celery': {
-        'execute': {
-            'send_task': 'send_task',
-        },
-        'decorators': {
-            'task': 'task',
-            'periodic_task': _compat_periodic_task_decorator,
-        },
-        'log': {
-            'get_default_logger': 'log.get_default_logger',
-            'setup_logger': 'log.setup_logger',
-            'setup_logging_subsystem': 'log.setup_logging_subsystem',
-            'redirect_stdouts_to_logger': 'log.redirect_stdouts_to_logger',
-        },
-        'messaging': {
-            'TaskConsumer': 'amqp.TaskConsumer',
-            'establish_connection': 'connection',
-            'get_consumer_set': 'amqp.TaskConsumer',
-        },
-        'registry': {
-            'tasks': 'tasks',
-        },
-    },
-    'celery.task': {
-        'control': {
-            'broadcast': 'control.broadcast',
-            'rate_limit': 'control.rate_limit',
-            'time_limit': 'control.time_limit',
-            'ping': 'control.ping',
-            'revoke': 'control.revoke',
-            'discard_all': 'control.purge',
-            'inspect': 'control.inspect',
-        },
-        'schedules': 'celery.schedules',
-        'chords': 'celery.canvas',
-    }
-}
-
-
-class class_property(object):
-
-    def __init__(self, getter=None, setter=None):
-        if getter is not None and not isinstance(getter, classmethod):
-            getter = classmethod(getter)
-        if setter is not None and not isinstance(setter, classmethod):
-            setter = classmethod(setter)
-        self.__get = getter
-        self.__set = setter
-
-        info = getter.__get__(object)  # just need the info attrs.
-        self.__doc__ = info.__doc__
-        self.__name__ = info.__name__
-        self.__module__ = info.__module__
-
-    def __get__(self, obj, type=None):
-        if obj and type is None:
-            type = obj.__class__
-        return self.__get.__get__(obj, type)()
-
-    def __set__(self, obj, value):
-        if obj is None:
-            return self
-        return self.__set.__get__(obj)(value)
-
-    def setter(self, setter):
-        return self.__class__(self.__get, setter)
-
-
-def reclassmethod(method):
-    return classmethod(fun_of_method(method))
-
-
-class LazyModule(ModuleType):
-    _compat_modules = ()
-    _all_by_module = {}
-    _direct = {}
-    _object_origins = {}
-
-    def __getattr__(self, name):
-        if name in self._object_origins:
-            module = __import__(self._object_origins[name], None, None, [name])
-            for item in self._all_by_module[module.__name__]:
-                setattr(self, item, getattr(module, item))
-            return getattr(module, name)
-        elif name in self._direct:  # pragma: no cover
-            module = __import__(self._direct[name], None, None, [name])
-            setattr(self, name, module)
-            return module
-        return ModuleType.__getattribute__(self, name)
-
-    def __dir__(self):
-        return list(set(self.__all__) | DEFAULT_ATTRS)
-
-    def __reduce__(self):
-        return import_module, (self.__name__,)
-
-
-def create_module(name, attrs, cls_attrs=None, pkg=None,
-                  base=LazyModule, prepare_attr=None):
-    fqdn = '.'.join([pkg.__name__, name]) if pkg else name
-    cls_attrs = {} if cls_attrs is None else cls_attrs
-    pkg, _, modname = name.rpartition('.')
-    cls_attrs['__module__'] = pkg
-
-    attrs = {
-        attr_name: (prepare_attr(attr) if prepare_attr else attr)
-        for attr_name, attr in items(attrs)
-    }
-    module = sys.modules[fqdn] = type(
-        bytes_if_py2(modname), (base,), cls_attrs)(bytes_if_py2(name))
-    module.__dict__.update(attrs)
-    return module
-
-
-def recreate_module(name, compat_modules=(), by_module={}, direct={},
-                    base=LazyModule, **attrs):
-    old_module = sys.modules[name]
-    origins = get_origins(by_module)
-    compat_modules = COMPAT_MODULES.get(name, ())
-
-    _all = tuple(set(reduce(
-        operator.add,
-        [tuple(v) for v in [compat_modules, origins, direct, attrs]],
-    )))
-    if sys.version_info[0] < 3:
-        _all = [s.encode() for s in _all]
-    cattrs = dict(
-        _compat_modules=compat_modules,
-        _all_by_module=by_module, _direct=direct,
-        _object_origins=origins,
-        __all__=_all,
-    )
-    new_module = create_module(name, attrs, cls_attrs=cattrs, base=base)
-    new_module.__dict__.update({
-        mod: get_compat_module(new_module, mod) for mod in compat_modules
-    })
-    return old_module, new_module
-
-
-def get_compat_module(pkg, name):
-    from .local import Proxy
-
-    def prepare(attr):
-        if isinstance(attr, string_t):
-            return Proxy(getappattr, (attr,))
-        return attr
-
-    attrs = COMPAT_MODULES[pkg.__name__][name]
-    if isinstance(attrs, string_t):
-        fqdn = '.'.join([pkg.__name__, name])
-        module = sys.modules[fqdn] = import_module(attrs)
-        return module
-    attrs['__all__'] = list(attrs)
-    return create_module(name, dict(attrs), pkg=pkg, prepare_attr=prepare)
-
-
-def get_origins(defs):
-    origins = {}
-    for module, attrs in items(defs):
-        origins.update({attr: module for attr in attrs})
-    return origins
+sys.modules[__name__] = vine.five
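
The single remaining added line works because Python consults sys.modules before running any import machinery, so every later `import celery.five` resolves straight to vine.five. A stand-alone sketch of the same aliasing trick, with hypothetical names:

    import sys
    import types

    # register a hand-built module object under a plain module name
    stand_in = types.ModuleType('legacy_api')
    stand_in.answer = 42
    sys.modules['legacy_api'] = stand_in

    import legacy_api
    print(legacy_api.answer)  # -> 42, served by the stand-in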

+ 1 - 0
celery/fixups/__init__.py

@@ -0,0 +1 @@
+"""Fixups."""

+ 2 - 0
celery/fixups/django.py

@@ -32,6 +32,7 @@ def _maybe_close_fd(fh):


 def fixup(app, env='DJANGO_SETTINGS_MODULE'):
+    """Install Django fixup if settings module environment is set."""
     SETTINGS_MODULE = os.environ.get(env)
     if SETTINGS_MODULE and 'django' not in app.loader_cls.lower():
         try:
@@ -43,6 +44,7 @@ def fixup(app, env='DJANGO_SETTINGS_MODULE'):


 class DjangoFixup(object):
+    """Fixup installed when using Django."""

     def __init__(self, app):
         self.app = app
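
As the new docstring says, the fixup only engages when the settings-module variable is present, so projects normally export it before the app is created (a conventional sketch; `proj.settings` is a placeholder):

    import os

    # must happen before the Celery app is instantiated so fixup() sees it
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')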

+ 1 - 1
celery/loaders/__init__.py

@@ -18,5 +18,5 @@ LOADER_ALIASES = {


 def get_loader_cls(loader):
-    """Get loader class by name/alias"""
+    """Get loader class by name/alias."""
     return symbol_by_name(loader, LOADER_ALIASES, imp=import_from_cwd)

+ 1 - 1
celery/loaders/app.py

@@ -8,4 +8,4 @@ __all__ = ['AppLoader']


 class AppLoader(BaseLoader):
-    pass
+    """Default loader used when an app is specified."""

+ 9 - 14
celery/loaders/base.py

@@ -37,7 +37,7 @@ unconfigured = object()


 class BaseLoader(object):
-    """The base class for loaders.
+    """Base class for loaders.

     Loaders handles,

@@ -54,6 +54,7 @@ class BaseLoader(object):

         * What modules are imported to find tasks?
     """
+
     builtin_modules = frozenset()
     configured = False
     override_backends = {}
@@ -71,25 +72,23 @@ class BaseLoader(object):
         return datetime.now()

     def on_task_init(self, task_id, task):
-        """This method is called before a task is executed."""
+        """Called before a task is executed."""
         pass

     def on_process_cleanup(self):
-        """This method is called after a task is executed."""
+        """Called after a task is executed."""
         pass

     def on_worker_init(self):
-        """This method is called when the worker (:program:`celery worker`)
-        starts."""
+        """Called when the worker (:program:`celery worker`) starts."""
         pass

     def on_worker_shutdown(self):
-        """This method is called when the worker (:program:`celery worker`)
-        shuts down."""
+        """Called when the worker (:program:`celery worker`) shuts down."""
         pass

     def on_worker_process_init(self):
-        """This method is called when a child process starts."""
+        """Called when a child process starts."""
         pass

     def import_task_module(self, module):
@@ -181,9 +180,7 @@ class BaseLoader(object):
         typemap = dict(Option.typemap, **extra_types)

         def getarg(arg):
-            """Parse a single configuration definition from
-            the command-line."""
-
+            """Parse single configuration from command-line."""
             # ## find key/value
             # ns.key=value|ns_key=value (case insensitive)
             key, value = arg.split('=', 1)
@@ -251,9 +248,7 @@ def autodiscover_tasks(packages, related_name='tasks'):


 def find_related_module(package, related_name):
-    """Given a package name and a module name, tries to find that
-    module."""
-
+    """Find module in package."""
     # Django 1.7 allows for speciying a class name in INSTALLED_APPS.
     # (Issue #2248).
     try:

+ 1 - 2
celery/loaders/default.py

@@ -26,8 +26,7 @@ class Loader(BaseLoader):
         return DictAttribute(settingsdict)

     def read_configuration(self, fail_silently=True):
-        """Read configuration from :file:`celeryconfig.py` and configure
-        celery and Django so it can be used by regular Python."""
+        """Read configuration from :file:`celeryconfig.py`."""
         configname = os.environ.get('CELERY_CONFIG_MODULE',
                                     DEFAULT_CONFIG_MODULE)
         try:

+ 218 - 7
celery/local.py

@@ -8,10 +8,13 @@ Parts of this module is Copyright by Werkzeug Team.
 """
 from __future__ import absolute_import, unicode_literals

-import importlib
+import operator
 import sys

-from .five import bytes_if_py2, string
+from importlib import import_module
+from types import ModuleType
+
+from .five import bytes_if_py2, items, string, string_t

 __all__ = ['Proxy', 'PromiseProxy', 'try_import', 'maybe_evaluate']

@@ -41,10 +44,12 @@ def _default_cls_attr(name, type_, cls_value):


 def try_import(module, default=None):
-    """Try to import and return module, or return
-    None if the module does not exist."""
+    """Try to import and return module.
+
+    Returns None if the module does not exist.
+    """
     try:
-        return importlib.import_module(module)
+        return import_module(module)
     except ImportError:
         return default

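The reworded docstring captures the whole contract; for illustration (yaml standing in for any optional dependency):

    from celery.local import try_import

    yaml = try_import('yaml')  # the module if importable, else None
    if yaml is None:
        print('PyYAML missing, using a fallback')
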
@@ -95,9 +100,12 @@ class Proxy(object):
         return self._get_class()

     def _get_current_object(self):
-        """Return the current object.  This is useful if you want the real
+        """Get current object.
+
+        This is useful if you want the real
         object behind the proxy at a time for performance reasons or because
-        you want to pass the object into a different context."""
+        you want to pass the object into a different context.
+        """
         loc = object.__getattribute__(self, '_Proxy__local')
         if not hasattr(loc, '__release_local__'):
             return loc(*self.__args, **self.__kwargs)
@@ -369,7 +377,210 @@ class PromiseProxy(Proxy):


 def maybe_evaluate(obj):
+    """Attempt to evaluate promise, even if obj is not a promise."""
     try:
         return obj.__maybe_evaluate__()
     except AttributeError:
         return obj
+
+#  ############# Module Generation ##########################
+
+# Utilities to dynamically
+# recreate modules, either for lazy loading or
+# to create old modules at runtime instead of
+# having them litter the source tree.
+
+# import fails in python 2.5. fallback to reduce in stdlib
+
+MODULE_DEPRECATED = """
+The module %s is deprecated and will be removed in a future version.
+"""
+
+DEFAULT_ATTRS = {'__file__', '__path__', '__doc__', '__all__'}
+
+# im_func is no longer available in Py3.
+# instead the unbound method itself can be used.
+if sys.version_info[0] == 3:  # pragma: no cover
+    def fun_of_method(method):
+        return method
+else:
+    def fun_of_method(method):  # noqa
+        return method.im_func
+
+
+def getappattr(path):
+    """Get attribute from current_app recursively.
+
+    Example: ``getappattr('amqp.get_task_consumer')``.
+
+    """
+    from celery import current_app
+    return current_app._rgetattr(path)
+
+
+def _compat_periodic_task_decorator(*args, **kwargs):
+    from celery.task import periodic_task
+    return periodic_task(*args, **kwargs)
+
+COMPAT_MODULES = {
+    'celery': {
+        'execute': {
+            'send_task': 'send_task',
+        },
+        'decorators': {
+            'task': 'task',
+            'periodic_task': _compat_periodic_task_decorator,
+        },
+        'log': {
+            'get_default_logger': 'log.get_default_logger',
+            'setup_logger': 'log.setup_logger',
+            'setup_logging_subsystem': 'log.setup_logging_subsystem',
+            'redirect_stdouts_to_logger': 'log.redirect_stdouts_to_logger',
+        },
+        'messaging': {
+            'TaskConsumer': 'amqp.TaskConsumer',
+            'establish_connection': 'connection',
+            'get_consumer_set': 'amqp.TaskConsumer',
+        },
+        'registry': {
+            'tasks': 'tasks',
+        },
+    },
+    'celery.task': {
+        'control': {
+            'broadcast': 'control.broadcast',
+            'rate_limit': 'control.rate_limit',
+            'time_limit': 'control.time_limit',
+            'ping': 'control.ping',
+            'revoke': 'control.revoke',
+            'discard_all': 'control.purge',
+            'inspect': 'control.inspect',
+        },
+        'schedules': 'celery.schedules',
+        'chords': 'celery.canvas',
+    }
+}
+
+
+class class_property(object):
+
+    def __init__(self, getter=None, setter=None):
+        if getter is not None and not isinstance(getter, classmethod):
+            getter = classmethod(getter)
+        if setter is not None and not isinstance(setter, classmethod):
+            setter = classmethod(setter)
+        self.__get = getter
+        self.__set = setter
+
+        info = getter.__get__(object)  # just need the info attrs.
+        self.__doc__ = info.__doc__
+        self.__name__ = info.__name__
+        self.__module__ = info.__module__
+
+    def __get__(self, obj, type=None):
+        if obj and type is None:
+            type = obj.__class__
+        return self.__get.__get__(obj, type)()
+
+    def __set__(self, obj, value):
+        if obj is None:
+            return self
+        return self.__set.__get__(obj)(value)
+
+    def setter(self, setter):
+        return self.__class__(self.__get, setter)
+
+
+def reclassmethod(method):
+    return classmethod(fun_of_method(method))
+
+
+class LazyModule(ModuleType):
+    _compat_modules = ()
+    _all_by_module = {}
+    _direct = {}
+    _object_origins = {}
+
+    def __getattr__(self, name):
+        if name in self._object_origins:
+            module = __import__(self._object_origins[name], None, None, [name])
+            for item in self._all_by_module[module.__name__]:
+                setattr(self, item, getattr(module, item))
+            return getattr(module, name)
+        elif name in self._direct:  # pragma: no cover
+            module = __import__(self._direct[name], None, None, [name])
+            setattr(self, name, module)
+            return module
+        return ModuleType.__getattribute__(self, name)
+
+    def __dir__(self):
+        return list(set(self.__all__) | DEFAULT_ATTRS)
+
+    def __reduce__(self):
+        return import_module, (self.__name__,)
+
+
+def create_module(name, attrs, cls_attrs=None, pkg=None,
+                  base=LazyModule, prepare_attr=None):
+    fqdn = '.'.join([pkg.__name__, name]) if pkg else name
+    cls_attrs = {} if cls_attrs is None else cls_attrs
+    pkg, _, modname = name.rpartition('.')
+    cls_attrs['__module__'] = pkg
+
+    attrs = {
+        attr_name: (prepare_attr(attr) if prepare_attr else attr)
+        for attr_name, attr in items(attrs)
+    }
+    module = sys.modules[fqdn] = type(
+        bytes_if_py2(modname), (base,), cls_attrs)(bytes_if_py2(name))
+    module.__dict__.update(attrs)
+    return module
+
+
+def recreate_module(name, compat_modules=(), by_module={}, direct={},
+                    base=LazyModule, **attrs):
+    old_module = sys.modules[name]
+    origins = get_origins(by_module)
+    compat_modules = COMPAT_MODULES.get(name, ())
+
+    _all = tuple(set(reduce(
+        operator.add,
+        [tuple(v) for v in [compat_modules, origins, direct, attrs]],
+    )))
+    if sys.version_info[0] < 3:
+        _all = [s.encode() for s in _all]
+    cattrs = dict(
+        _compat_modules=compat_modules,
+        _all_by_module=by_module, _direct=direct,
+        _object_origins=origins,
+        __all__=_all,
+    )
+    new_module = create_module(name, attrs, cls_attrs=cattrs, base=base)
+    new_module.__dict__.update({
+        mod: get_compat_module(new_module, mod) for mod in compat_modules
+    })
+    return old_module, new_module
+
+
+def get_compat_module(pkg, name):
+    from .local import Proxy
+
+    def prepare(attr):
+        if isinstance(attr, string_t):
+            return Proxy(getappattr, (attr,))
+        return attr
+
+    attrs = COMPAT_MODULES[pkg.__name__][name]
+    if isinstance(attrs, string_t):
+        fqdn = '.'.join([pkg.__name__, name])
+        module = sys.modules[fqdn] = import_module(attrs)
+        return module
+    attrs['__all__'] = list(attrs)
+    return create_module(name, dict(attrs), pkg=pkg, prepare_attr=prepare)
+
+
+def get_origins(defs):
+    origins = {}
+    for module, attrs in items(defs):
+        origins.update({attr: module for attr in attrs})
+    return origins
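
The relocated class_property descriptor makes a single method readable on both the class and its instances; a minimal usage sketch (all names illustrative):

    class Settings(object):

        @class_property
        def debug(cls):
            return getattr(cls, '_debug', False)

    Settings.debug    # -> False, resolved on the class itself
    Settings().debug  # -> False, and on instances too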

+ 22 - 11
celery/platforms.py

@@ -1,6 +1,9 @@
 # -*- coding: utf-8 -*-
-"""Utilities dealing with platform specifics: signals, daemonization,
-users, groups, and so on."""
+"""Platforms.
+
+Utilities dealing with platform specifics: signals, daemonization,
+users, groups, and so on.
+"""
 from __future__ import absolute_import, print_function, unicode_literals

 import atexit
@@ -95,6 +98,7 @@ SIGMAP = {getattr(_signal, name): name for name in SIGNAMES}


 def isatty(fh):
+    """Return true if the process has a controlling terminal."""
     try:
         return fh.isatty()
     except AttributeError:
@@ -121,7 +125,7 @@ class LockFailed(Exception):


 class Pidfile(object):
-    """Pidfile
+    """Pidfile.

     This is the type returned by :func:`create_pidlock`.

@@ -178,7 +182,9 @@

     def remove_if_stale(self):
         """Remove the lock if the process isn't running.
-        (does not respond to signals)."""
+
+        I.e. the process does not respond to signals.
+        """
         try:
             pid = self.read_pid()
         except ValueError as exc:
@@ -292,6 +298,7 @@ def fd_by_path(paths):


 class DaemonContext(object):
+    """Context manager daemonizing the process."""

     _is_open = False

@@ -445,9 +452,9 @@ def parse_gid(gid):


 def _setgroups_hack(groups):
-    """:fun:`setgroups` may have a platform-dependent limit,
-    and it's not always possible to know in advance what this limit
-    is, so we use this ugly hack stolen from glibc."""
+    # :fun:`setgroups` may have a platform-dependent limit,
+    # and it's not always possible to know in advance what this limit
+    # is, so we use this ugly hack stolen from glibc.
     groups = groups[:]

     while 1:
@@ -481,8 +488,11 @@ def setgroups(groups):


 def initgroups(uid, gid):
-    """Compat version of :func:`os.initgroups` that was first
-    added to Python 2.7."""
+    """Init process group permissions.
+
+    Compat version of :func:`os.initgroups` that was first
+    added to Python 2.7.
+    """
     if not pwd:  # pragma: no cover
         return
     username = pwd.getpwuid(uid)[0]
@@ -674,6 +684,7 @@ ignore_signal = signals.ignore                # compat


 def signal_name(signum):
+    """Return name of signal from signal number."""
     return SIGMAP[signum][3:]


@@ -699,12 +710,12 @@ def set_process_title(progname, info=None):
 if os.environ.get('NOSETPS'):  # pragma: no cover

     def set_mp_process_title(*a, **k):
+        """Disabled feature."""
         pass
 else:

     def set_mp_process_title(progname, info=None, hostname=None):  # noqa
-        """Set the :command:`ps` name using the :mod:`multiprocessing`
-        process name.
+        """Set the :command:`ps` name from the current process name.

         Only works if :pypi:`setproctitle` is installed.
         """

+ 33 - 23
celery/result.py

@@ -55,7 +55,7 @@ def allow_join_result():


 class ResultBase(object):
-    """Base class for all results"""
+    """Base class for results."""

     #: Parent result (if part of a chain)
     parent = None
@@ -70,6 +70,7 @@ class AsyncResult(ResultBase):
         id (str): See :attr:`id`.
         backend (Backend): See :attr:`backend`.
     """
+
     app = None

     #: Error raised for timeouts.
@@ -200,7 +201,9 @@ class AsyncResult(ResultBase):
             node = node.parent

     def collect(self, intermediate=False, **kwargs):
-        """Iterator, like :meth:`get` will wait for the task to complete,
+        """Collect results as they return.
+
+        Iterator, like :meth:`get` will wait for the task to complete,
         but will also follow :class:`AsyncResult` and :class:`ResultSet`
         returned by the task, yielding ``(result, value)`` tuples for each
         result in the tree.
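
The ``(result, value)`` tuples are easiest to read off an example; a sketch, assuming a hypothetical deep_work task that returns further AsyncResults:

    res = deep_work.delay()
    for result, value in res.collect(intermediate=True):
        print(result.id, value)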
@@ -265,7 +268,7 @@ class AsyncResult(ResultBase):
                     raise IncompleteStream()

     def ready(self):
-        """Returns :const:`True` if the task has been executed.
+        """Return :const:`True` if the task started executing.

         If the task is still running, pending, or is waiting
         for retry then :const:`False` is returned.
@@ -273,11 +276,11 @@ class AsyncResult(ResultBase):
         return self.state in self.backend.READY_STATES

     def successful(self):
-        """Returns :const:`True` if the task executed successfully."""
+        """Return :const:`True` if the task executed successfully."""
         return self.state == states.SUCCESS

     def failed(self):
-        """Returns :const:`True` if the task failed."""
+        """Return :const:`True` if the task failed."""
         return self.state == states.FAILURE

     def throw(self, *args, **kwargs):
@@ -308,11 +311,11 @@ class AsyncResult(ResultBase):
         return graph

     def __str__(self):
-        """`str(self) -> self.id`"""
+        """`str(self) -> self.id`."""
         return str(self.id)

     def __hash__(self):
-        """`hash(self) -> hash(self.id)`"""
+        """`hash(self) -> hash(self.id)`."""
         return hash(self.id)

     def __repr__(self):
@@ -380,9 +383,13 @@ class AsyncResult(ResultBase):

     @property
     def result(self):
-        """When the task has been executed, this contains the return value.
-        If the task raised an exception, this will be the exception
-        instance."""
+        """Task return value.
+
+        Note:
+            When the task has been executed, this contains the return value.
+            If the task raised an exception, this will be the exception
+            instance.
+        """
         return self._get_task_meta()['result']
     info = result

@@ -425,7 +432,7 @@ class AsyncResult(ResultBase):

     @property
     def task_id(self):
-        """compat alias to :attr:`id`"""
+        """Compat alias to :attr:`id`."""
         return self.id

     @task_id.setter  # noqa
@@ -436,11 +443,12 @@ class AsyncResult(ResultBase):
 @Thenable.register
 @python_2_unicode_compatible
 class ResultSet(ResultBase):
-    """Working with more than one result.
+    """A collection of results.

     Arguments:
         results (Sequence[AsyncResult]): List of result instances.
     """
+
     _app = None

     #: List of results in in the set.
@@ -485,16 +493,17 @@ class ResultSet(ResultBase):
             raise KeyError(result)

     def discard(self, result):
-        """Remove result from the set if it is a member,
-        or do nothing if it's not."""
+        """Remove result from the set if it is a member.
+
+        Does nothing if it's not a member.
+        """
         try:
             self.remove(result)
         except KeyError:
             pass

     def update(self, results):
-        """Update set with the union of itself and an iterable with
-        results."""
+        """Extend from iterable of results."""
         self.results.extend(r for r in results if r not in self.results)

     def clear(self):
@@ -502,7 +511,7 @@ class ResultSet(ResultBase):
         self.results[:] = []  # don't create new list.

     def successful(self):
-        """Was all of the tasks successful?
+        """Return true if all tasks were successful.

         Returns:
             bool: true if all of the tasks finished
@@ -511,7 +520,7 @@ class ResultSet(ResultBase):
         return all(result.successful() for result in self.results)

     def failed(self):
-        """Did any of the tasks fail?
+        """Return true if any of the tasks failed.

         Returns:
             bool: true if one of the tasks failed.
@@ -524,7 +533,7 @@ class ResultSet(ResultBase):
             result.maybe_throw(callback=callback, propagate=propagate)

     def waiting(self):
-        """Are any of the tasks incomplete?
+        """Return true if any of the tasks are incomplete.

         Returns:
             bool: true if one of the tasks are still
@@ -576,7 +585,7 @@ class ResultSet(ResultBase):
         return iter(self.results)

     def __getitem__(self, index):
-        """`res[i] -> res.results[i]`"""
+        """`res[i] -> res.results[i]`."""
         return self.results[index]

     @deprecated.Callable('4.0', '5.0')
@@ -605,7 +614,7 @@ class ResultSet(ResultBase):

     def get(self, timeout=None, propagate=True, interval=0.5,
             callback=None, no_ack=True, on_message=None):
-        """See :meth:`join`
+        """See :meth:`join`.

         This is here for API compatibility with :class:`AsyncResult`,
         in addition it uses :meth:`join_native` if available for the
@@ -621,7 +630,7 @@ class ResultSet(ResultBase):

     def join(self, timeout=None, propagate=True, interval=0.5,
              callback=None, no_ack=True, on_message=None, on_interval=None):
-        """Gathers the results of all tasks as a list in order.
+        """Gather the results of all tasks as a list in order.

         Note:
             This can be an expensive operation for result store
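
In-order gathering then looks like this (a sketch, assuming a trivial registered add task):

    rs = ResultSet([add.delay(2, 2), add.delay(4, 4)])
    rs.join(timeout=10)  # -> [4, 8], in submission order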
@@ -924,7 +933,7 @@ class EagerResult(AsyncResult):

     @property
     def result(self):
-        """The tasks return value"""
+        """The task's return value."""
         return self._result

     @property
@@ -944,6 +953,7 @@ class EagerResult(AsyncResult):


 def result_from_tuple(r, app=None):
+    """Deserialize result from tuple."""
     # earlier backends may just pickle, so check if
     # result is already prepared.
     app = app_or_default(app)

+ 51 - 23
celery/schedules.py

@@ -74,6 +74,7 @@ class schedule(object):
             (class:`~datetime.datetime`).
         app (~@Celery): Celery app instance.
     """
+
     relative = False

     def __init__(self, run_every=None, relative=False, nowfun=None, app=None):
@@ -92,14 +93,16 @@ class schedule(object):
         )

     def is_due(self, last_run_at):
-        """Returns tuple of two items ``(is_due, next_time_to_check)``,
-        where next time to check is in seconds.
+        """Return tuple of ``(is_due, next_time_to_check)``.
+
+        Notes:
+            - next time to check is in seconds.

-        * ``(True, 20)``, means the task should be run now, and the next
-            time to check is in 20 seconds.
+            - ``(True, 20)``, means the task should be run now, and the next
+                time to check is in 20 seconds.

-        * ``(False, 12.3)``, means the task is not due, but that the scheduler
-          should check again in 12.3 seconds.
+            - ``(False, 12.3)``, means the task is not due, but that the
+              scheduler should check again in 12.3 seconds.

         The next time to check is used to save energy/CPU cycles,
         it does not need to be accurate but will influence the precision
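
The tuple contract reads naturally in code; a sketch, using the schedule's own clock for last_run_at to avoid naive/aware datetime mismatches:

    from datetime import timedelta
    from celery.schedules import schedule

    s = schedule(run_every=timedelta(seconds=20))
    is_due, next_check = s.is_due(last_run_at=s.now())
    # just ran -> roughly (False, 20.0): not due, check again in ~20s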
@@ -174,7 +177,9 @@ class schedule(object):


 class crontab_parser(object):
-    """Parser for Crontab expressions.  Any expression of the form 'groups'
+    """Parser for Crontab expressions.
+
+    Any expression of the form 'groups'
     (see BNF grammar below) is accepted and expanded to a set of numbers.
     These numbers represent the units of time that the Crontab needs to
     run on:
@@ -218,6 +223,7 @@ class crontab_parser(object):

         :math:`max_ + min_ - 1`
     """
+
     ParseException = ParseException

     _range = r'(\w+?)-(\w+)'
@@ -296,7 +302,9 @@ class crontab_parser(object):

 @python_2_unicode_compatible
 class crontab(schedule):
-    """A Crontab can be used as the ``run_every`` value of a
+    """Crontab schedule.
+
+    A Crontab can be used as the ``run_every`` value of a
     periodic task entry to add :manpage:`crontab(5)`-like scheduling.

     Like a :manpage:`cron(5)`-job, you can specify units of time of when
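
For reference, a typical value built from this class (the standard construction the docstring goes on to document):

    from celery.schedules import crontab

    # run every Monday morning at 7:30
    run_every = crontab(hour=7, minute=30, day_of_week=1)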
@@ -388,7 +396,9 @@ class crontab(schedule):

     @staticmethod
     def _expand_cronspec(cronspec, max_, min_=0):
-        """Takes the given cronspec argument in one of the forms:
+        """Expand cron specification.
+
+        Takes the given cronspec argument in one of the forms:

         .. code-block:: text

@@ -430,7 +440,9 @@ class crontab(schedule):
         return result

     def _delta_to_next(self, last_run_at, next_hour, next_minute):
-        """Takes a :class:`~datetime.datetime` of last run, next minute and hour,
+        """Find next delta.
+
+        Takes a :class:`~datetime.datetime` of last run, next minute and hour,
         and returns a :class:`~celery.utils.time.ffwd` for the next
         scheduled day and time.

@@ -569,15 +581,21 @@ class crontab(schedule):
         return self.to_local(last_run_at), delta, self.to_local(now)

     def remaining_estimate(self, last_run_at, ffwd=ffwd):
-        """Returns when the periodic task should run next as a
-        :class:`~datetime.timedelta`."""
+        """Estimate of next run time.
+
+        Returns when the periodic task should run next as a
+        :class:`~datetime.timedelta`.
+        """
         return remaining(*self.remaining_delta(last_run_at, ffwd=ffwd))

     def is_due(self, last_run_at):
-        """Returns tuple of two items ``(is_due, next_time_to_run)``,
-        where next time to run is in seconds.
+        """Return tuple of ``(is_due, next_time_to_run)``.

-        See :meth:`celery.schedules.schedule.is_due` for more information.
+        Note:
+            Next time to run is in seconds.
+
+        See Also:
+            :meth:`celery.schedules.schedule.is_due` for more information.
         """
         rem_delta = self.remaining_estimate(last_run_at)
         rem = max(rem_delta.total_seconds(), 0)
@@ -606,6 +624,7 @@ class crontab(schedule):


 def maybe_schedule(s, relative=False, app=None):
+    """Return schedule from number, timedelta, or actual schedule."""
     if s is not None:
         if isinstance(s, numbers.Number):
             s = timedelta(seconds=s)
@@ -618,7 +637,9 @@ def maybe_schedule(s, relative=False, app=None):

 @python_2_unicode_compatible
 class solar(schedule):
-    """A solar event can be used as the ``run_every`` value of a
+    """Solar event.
+
+    A solar event can be used as the ``run_every`` value of a
     periodic task entry to schedule based on certain solar events.

     Notes:
@@ -727,10 +748,14 @@ class solar(schedule):
         )

     def remaining_estimate(self, last_run_at):
-        """Returns when the periodic task should run next as a
-        :class:`~datetime.timedelta`, or if it shouldn't run today (e.g.,
-        the sun does not rise today), returns the time when the next check
-        should take place."""
+        """Return estimate of next time to run.
+
+        Returns:
+            ~datetime.timedelta: when the periodic task should
+                run next, or if it shouldn't run today (e.g., the sun does
+                not rise today), returns the time when the next check
+                should take place.
+        """
         last_run_at = self.maybe_make_aware(last_run_at)
         last_run_at_utc = localize(last_run_at, timezone.utc)
         self.cal.date = last_run_at_utc
@@ -752,10 +777,13 @@ class solar(schedule):
         return delta

     def is_due(self, last_run_at):
-        """Returns tuple of two items ``(is_due, next_time_to_run)``,
-        where next time to run is in seconds.
+        """Return tuple of ``(is_due, next_time_to_run)``.
+
+        Note:
+            Next time to run is in seconds.

-        See :meth:`celery.schedules.schedule.is_due` for more information.
+        See Also:
+            :meth:`celery.schedules.schedule.is_due` for more information.
         """
         rem_delta = self.remaining_estimate(last_run_at)
         rem = max(rem_delta.total_seconds(), 0)

+ 7 - 7
celery/security/certificate.py

@@ -32,33 +32,33 @@ class Certificate(object):
         return bytes_to_str(self._cert.get_serial_number())

     def get_issuer(self):
-        """Return issuer (CA) as a string"""
+        """Return issuer (CA) as a string."""
         return ' '.join(bytes_to_str(x[1]) for x in
                         self._cert.get_issuer().get_components())

     def get_id(self):
-        """Serial number/issuer pair uniquely identifies a certificate"""
+        """Serial number/issuer pair uniquely identifies a certificate."""
         return '{0} {1}'.format(self.get_issuer(), self.get_serial_number())

     def verify(self, data, signature, digest):
-        """Verifies the signature for string containing data."""
+        """Verify signature for string containing data."""
         with reraise_errors('Bad signature: {0!r}'):
             crypto.verify(self._cert, signature, data, digest)


 class CertStore(object):
-    """Base class for certificate stores"""
+    """Base class for certificate stores."""

     def __init__(self):
         self._certs = {}

     def itercerts(self):
-        """an iterator over the certificates"""
+        """Return certificate iterator."""
         for c in values(self._certs):
             yield c

     def __getitem__(self, id):
-        """get certificate by id"""
+        """Get certificate by id."""
         try:
             return self._certs[bytes_to_str(id)]
         except KeyError:
@@ -72,7 +72,7 @@ class CertStore(object):


 class FSCertStore(CertStore):
-    """File system certificate store"""
+    """File system certificate store."""

     def __init__(self, path):
         CertStore.__init__(self)

+ 1 - 0
celery/security/key.py

@@ -10,6 +10,7 @@ __all__ = ['PrivateKey']


 class PrivateKey(object):
+    """Represents a private key."""

     def __init__(self, key):
         with reraise_errors('Invalid private key: {0!r}'):

+ 4 - 3
celery/security/serialization.py

@@ -19,6 +19,7 @@ PY3 = sys.version_info[0] == 3


 class SecureSerializer(object):
+    """Signed serializer."""

     def __init__(self, key=None, cert=None, cert_store=None,
                  digest='sha1', serializer='json'):
@@ -29,7 +30,7 @@ class SecureSerializer(object):
         self._serializer = serializer

     def serialize(self, data):
-        """serialize data structure into string"""
+        """Serialize data structure into string."""
         assert self._key is not None
         assert self._cert is not None
         with reraise_errors('Unable to serialize: {0!r}', (Exception,)):
@@ -45,7 +46,7 @@ class SecureSerializer(object):
                               signer=self._cert.get_id())

     def deserialize(self, data):
-        """deserialize data structure from string"""
+        """Deserialize data structure from string."""
         assert self._cert_store is not None
         with reraise_errors('Unable to deserialize: {0!r}', (Exception,)):
             payload = self._unpack(data)
@@ -90,7 +91,7 @@ class SecureSerializer(object):

 def register_auth(key=None, cert=None, store=None, digest='sha1',
                   serializer='json'):
-    """register security serializer"""
+    """Register security serializer."""
     s = SecureSerializer(key and PrivateKey(key),
                          cert and Certificate(cert),
                          store and FSCertStore(store),

+ 1 - 0
celery/security/utils.py

@@ -19,6 +19,7 @@ __all__ = ['reraise_errors']

 @contextmanager
 def reraise_errors(msg='{0!r}', errors=None):
+    """Context reraising crypto errors as :exc:`SecurityError`."""
     assert crypto is not None
     errors = (crypto.Error,) if errors is None else errors
     try:

+ 3 - 1
celery/states.py

@@ -92,7 +92,9 @@ def precedence(state):


 class state(str):
-    """State is a subclass of :class:`str`, implementing comparison
+    """Task state.
+
+    State is a subclass of :class:`str`, implementing comparison
     methods adhering to state precedence rules::

         >>> from celery.states import state, PENDING, SUCCESS

+ 9 - 9
celery/task/base.py

@@ -12,11 +12,8 @@ from kombu import Exchange
 
 
 from celery import current_app
 from celery import current_app
 from celery.app.task import Context, Task as BaseTask, _reprtask
 from celery.app.task import Context, Task as BaseTask, _reprtask
-from celery.five import (
-    class_property, reclassmethod,
-    python_2_unicode_compatible, with_metaclass,
-)
-from celery.local import Proxy
+from celery.five import python_2_unicode_compatible, with_metaclass
+from celery.local import Proxy, class_property, reclassmethod
 from celery.schedules import maybe_schedule
 from celery.schedules import maybe_schedule
 from celery.utils.log import get_task_logger
 from celery.utils.log import get_task_logger
 
 
@@ -56,6 +53,7 @@ class TaskType(type):
     If no :attr:`Task.name` attribute is provided, then the name is generated
     If no :attr:`Task.name` attribute is provided, then the name is generated
     from the module and class name.
     from the module and class name.
     """
     """
+
     _creation_count = {}  # used by old non-abstract task classes
     _creation_count = {}  # used by old non-abstract task classes
 
 
     def __new__(cls, name, bases, attrs):
     def __new__(cls, name, bases, attrs):
@@ -130,6 +128,7 @@ class Task(BaseTask):
 
     Modern applications should use :class:`celery.Task` instead.
     """
+
     abstract = True
     __bound__ = False
     __v2_compat__ = True
@@ -226,8 +225,9 @@ class Task(BaseTask):
 
     @classmethod
     def get_consumer(self, connection=None, queues=None, **kwargs):
-        """Deprecated method used to get consumer for the queue
-        this task is sent to.
+        """Get consumer for the queue this task is sent to.
+
+        Deprecated!
 
         Should be replaced by :class:`@amqp.TaskConsumer`.
         """
@@ -239,8 +239,8 @@ class Task(BaseTask):
 
 
 class PeriodicTask(Task):
-    """A periodic task is a task that adds itself to the
-    :setting:`beat_schedule` setting."""
+    """A task that adds itself to the :setting:`beat_schedule` setting."""
+
     abstract = True
     ignore_result = True
     relative = False
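
Since ``PeriodicTask`` is deprecated, the equivalent modern setup registers
the schedule in the :setting:`beat_schedule` setting directly; a minimal
sketch (task name, interval and arguments are illustrative):

    app.conf.beat_schedule = {
        'add-every-30-seconds': {
            'task': 'tasks.add',
            'schedule': 30.0,
            'args': (16, 16),
        },
    }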

+ 4 - 0
celery/utils/abstract.py

@@ -33,6 +33,8 @@ class _AbstractClass(object):
 
 
 class CallableTask(_AbstractClass, Callable):  # pragma: no cover
+    """Task interface."""
+
     __required_attributes__ = frozenset({
         'delay', 'apply_async', 'apply',
     })
@@ -55,6 +57,8 @@ class CallableTask(_AbstractClass, Callable):  # pragma: no cover
 
 
 class CallableSignature(CallableTask):  # pragma: no cover
+    """Celery Signature interface."""
+
     __required_attributes__ = frozenset({
         'clone', 'freeze', 'set', 'link', 'link_error', '__or__',
     })
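
These interfaces are abstract base classes, so conforming objects can be
checked with plain :func:`isinstance`; a sketch, assuming a task instance
``add`` is at hand:

    from celery.utils.abstract import CallableSignature, CallableTask

    isinstance(add, CallableTask)               # tasks provide delay/apply_async/apply
    isinstance(add.s(2, 2), CallableSignature)  # signatures provide clone/freeze/set/...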

+ 13 - 7
celery/utils/collections.py

@@ -45,6 +45,7 @@ REPR_LIMITED_SET = """\
 
 
 def force_mapping(m):
+    """Wrap object into supporting the mapping interface if necessary."""
     if isinstance(m, (LazyObject, LazySettings)):
         m = m._wrapped
     return DictAttribute(m) if not isinstance(m, Mapping) else m
@@ -61,6 +62,7 @@ def lpmerge(L, R):
 
 
 class OrderedDict(_OrderedDict):
+    """Dict where insertion order matters."""
 
     if PY3:  # pragma: no cover
         def _LRUkey(self):
@@ -109,11 +111,13 @@ class OrderedDict(_OrderedDict):
 
 
 class AttributeDictMixin(object):
-    """Augment classes with a Mapping interface by adding attribute
-    access (i.e., `d.key -> d[key]`)."""
+    """Mixin for Mapping interface that adds attribute access.
+
+    I.e., `d.key -> d[key]`.
+    """
 
     def __getattr__(self, k):
-        """`d.key -> d[key]`"""
+        """`d.key -> d[key]`."""
         try:
             return self[k]
         except KeyError:
@@ -122,13 +126,12 @@ class AttributeDictMixin(object):
                     type(self).__name__, k))
 
     def __setattr__(self, key, value):
-        """`d[key] = value -> d.key = value`"""
+        """`d[key] = value -> d.key = value`."""
         self[key] = value
 
 
 class AttributeDict(dict, AttributeDictMixin):
     """Dict subclass with attribute access."""
-    pass
 
 
 class DictAttribute(object):
@@ -137,6 +140,7 @@ class DictAttribute(object):
     `obj[k] -> obj.k`
     `obj[k] = val -> obj.k = val`
     """
+
     obj = None
 
     def __init__(self, obj):
@@ -205,6 +209,7 @@ MutableMapping.register(DictAttribute)
 
 
 class ChainMap(MutableMapping):
+    """Key lookup on a sequence of maps."""
 
     key_t = None
     changes = None
@@ -297,8 +302,6 @@ class ChainMap(MutableMapping):
         return cls(dict.fromkeys(iterable, *args))
 
     def copy(self):
-        """New ChainMap or subclass with a new copy of maps[0] and
-        refs to maps[1:]."""
         return self.__class__(self.maps[0].copy(), *self.maps[1:])
     __copy__ = copy  # Py2
 
@@ -633,6 +636,7 @@ MutableSet.register(LimitedSet)
 
 
 class Evictable(object):
+    """Mixin for classes supporting the ``evict`` method."""
 
     Empty = Empty
 
@@ -657,6 +661,7 @@ class Evictable(object):
 
 @python_2_unicode_compatible
 class Messagebuffer(Evictable):
+    """A buffer of pending messages."""
 
     Empty = Empty
 
@@ -719,6 +724,7 @@ Sequence.register(Messagebuffer)
 
 @python_2_unicode_compatible
 class BufferMap(OrderedDict, Evictable):
+    """Map of buffers."""
 
     Buffer = Messagebuffer
     Empty = Empty
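
A doctest-style sketch of the attribute-access helpers touched above:

    >>> from celery.utils.collections import AttributeDict, force_mapping
    >>> d = AttributeDict({'loglevel': 'INFO'})
    >>> d.loglevel                      # same as d['loglevel']
    'INFO'
    >>> class Config(object):
    ...     debug = False
    ...
    >>> force_mapping(Config())['debug']  # plain objects get a DictAttribute wrapper
    False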

+ 14 - 6
celery/utils/debug.py

@@ -46,8 +46,10 @@ def _on_blocking(signum, frame):
 
 @contextmanager
 def blockdetection(timeout):
-    """A timeout context using ``SIGALRM`` that can be used to detect blocking
-    functions."""
+    """Context that raises an exception if process is blocking.
+
+    Uses ``SIGALRM`` to detect blocking functions.
+    """
     if not timeout:
         yield
     else:
@@ -147,8 +149,11 @@ def mem_rss():
 
 
 def ps():  # pragma: no cover
-    """Return the global :class:`psutil.Process` instance,
-    or :const:`None` if :pypi:`psutil` is not installed."""
+    """Return the global :class:`psutil.Process` instance.
+
+    Note:
+        Returns :const:`None` if :pypi:`psutil` is not installed.
+    """
     global _process
     if _process is None and Process is not None:
         _process = Process(os.getpid())
@@ -163,8 +168,11 @@ def _process_memory_info(process):
 
 
 def cry(out=None, sepchr='=', seplen=49):  # pragma: no cover
-    """Return stack-trace of all active threads,
-    taken from https://gist.github.com/737056."""
+    """Return stack-trace of all active threads.
+
+    See Also:
+        Taken from https://gist.github.com/737056.
+    """
     import threading
 
     out = WhateverIO() if out is None else out
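
A usage sketch for ``blockdetection`` (main thread only, as it relies on
``SIGALRM``; ``timeout`` is in seconds and the called function is illustrative):

    from celery.utils.debug import blockdetection

    with blockdetection(10):
        possibly_blocking_call()  # signalled if it blocks for more than 10s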

+ 2 - 0
celery/utils/deprecated.py

@@ -25,6 +25,7 @@ DEPRECATION_FMT = """
 
 def warn(description=None, deprecation=None,
          removal=None, alternative=None, stacklevel=2):
+    """Warn of (pending) deprecation."""
     ctx = {'description': description,
            'deprecation': deprecation, 'removal': removal,
            'alternative': alternative}
@@ -66,6 +67,7 @@ def Callable(deprecation=None, removal=None,
 
 def Property(deprecation=None, removal=None,
              alternative=None, description=None):
+    """Decorator for deprecated properties."""
     def _inner(fun):
         return _deprecated_property(
             fun, deprecation=deprecation, removal=removal,
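
For instance, a function scheduled for removal could emit the standard
deprecation warning like this (names and versions are illustrative):

    from celery.utils import deprecated

    def frobnicate():
        deprecated.warn(description='frobnicate',
                        removal='5.0',
                        alternative='use frobnicate_async() instead')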

+ 1 - 0
celery/utils/dispatch/__init__.py

@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+"""Observer pattern."""
 from __future__ import absolute_import, unicode_literals
 
 from .signal import Signal

+ 17 - 13
celery/utils/dispatch/saferef.py

@@ -18,7 +18,7 @@ PY3 = sys.version_info[0] == 3
 
 
 def safe_ref(target, on_delete=None):  # pragma: no cover
-    """Return a *safe* weak reference to a callable target
+    """Return a *safe* weak reference to a callable target.
 
     Arguments:
         target (Any): The object to be weakly referenced, if it's a
@@ -85,7 +85,7 @@ class BoundMethodWeakref(object):  # pragma: no cover
     _all_instances = weakref.WeakValueDictionary()
 
     def __new__(cls, target, on_delete=None, *arguments, **named):
-        """Create new instance or return current instance
+        """Create new instance or return current instance.
 
         Note:
             Basically this method of construction allows us to
@@ -127,7 +127,7 @@ class BoundMethodWeakref(object):  # pragma: no cover
                 which will be passed a pointer to this object.
         """
         def remove(weak, self=self):
-            """Set self.is_dead to true when method or instance is destroyed"""
+            """Set is_dead to true when method or instance is destroyed."""
             methods = self.deletion_methods[:]
             del(self.deletion_methods[:])
             try:
@@ -153,7 +153,7 @@ class BoundMethodWeakref(object):  # pragma: no cover
         self.fun_name = str(target.__func__.__name__)
 
     def calculate_key(cls, target):
-        """Calculate the reference key for this reference
+        """Calculate the reference key for this reference.
 
         Returns:
             Tuple[int, int]: Currently this is a two-tuple of
@@ -174,19 +174,19 @@ class BoundMethodWeakref(object):  # pragma: no cover
         return str(self)
 
     def __bool__(self):
-        """Whether we're still a valid reference"""
+        """Whether we're still a valid reference."""
         return self() is not None
     __nonzero__ = __bool__  # py2
 
     if not PY3:
         def __cmp__(self, other):
-            """Compare with another reference"""
+            """Compare with another reference."""
             if not isinstance(other, self.__class__):
                 return cmp(self.__class__, type(other))  # noqa
             return cmp(self.key, other.key)              # noqa
 
     def __call__(self):
-        """Return a strong reference to the bound method
+        """Return a strong reference to the bound method.
 
         If the target cannot be retrieved, then will
         return None, otherwise return a bound instance
@@ -204,8 +204,9 @@ class BoundMethodWeakref(object):  # pragma: no cover
 
 
 class BoundNonDescriptorMethodWeakref(BoundMethodWeakref):  # pragma: no cover
-    """A specialized :class:`BoundMethodWeakref`, for platforms where
-    instance methods are not descriptors.
+    """A specialized :class:`BoundMethodWeakref`.
+
+    For platforms where instance methods are not descriptors.
 
     Warning:
         It assumes that the function name and the target attribute name are
@@ -225,6 +226,7 @@ class BoundNonDescriptorMethodWeakref(BoundMethodWeakref):  # pragma: no cover
         aren't descriptors (e.g., Jython) this implementation has the
         advantage of working in the most cases.
     """
+
     def __init__(self, target, on_delete=None):
         """Return a weak-reference-like instance for a bound method.
 
@@ -248,14 +250,13 @@ class BoundNonDescriptorMethodWeakref(BoundMethodWeakref):  # pragma: no cover
                                                               on_delete)
 
     def __call__(self):
-        """Return a strong reference to the bound method
+        """Return a strong reference to the bound method.
 
         If the target cannot be retrieved, then will
         return None, otherwise return a bound instance
         method for our object and function.
 
         Note:
-
             You may call this method any number of times,
             as it does not invalidate the reference.
         """
@@ -273,8 +274,11 @@ class BoundNonDescriptorMethodWeakref(BoundMethodWeakref):  # pragma: no cover
 
 
 def get_bound_method_weakref(target, on_delete):  # pragma: no cover
-    """Instantiates the appropiate :class:`BoundMethodWeakRef`, depending
-    on the details of the underlying class method implementation."""
+    """Instantiate the appropriate :class:`BoundMethodWeakref`.
+
+    The choice depends on the details of the underlying class method
+    implementation.
+    """
     if hasattr(target, '__get__'):
         # target method is a descriptor, so the default implementation works:
         return BoundMethodWeakref(target=target, on_delete=on_delete)
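
A sketch of how ``safe_ref`` behaves for bound methods (exact collection
timing depends on the garbage collector; the class is illustrative):

    from celery.utils.dispatch.saferef import safe_ref

    class Receiver(object):
        def on_signal(self):
            pass

    r = Receiver()
    ref = safe_ref(r.on_signal)
    assert ref() is not None   # strong reference while r is alive
    del r                      # once r is collected, ref() returns None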

+ 2 - 1
celery/utils/dispatch/signal.py

@@ -197,7 +197,6 @@ class Signal(object):  # pragma: no cover
 
     def _remove_receiver(self, receiver):
         """Remove dead receivers from connections."""
-
         to_remove = []
         for key, connected_receiver in self.receivers:
             if connected_receiver == receiver:
@@ -208,7 +207,9 @@ class Signal(object):  # pragma: no cover
                     del self.receivers[idx]
 
     def __repr__(self):
+        """``repr(signal)``."""
         return '<Signal: {0}>'.format(type(self).__name__)
 
     def __str__(self):
+        """``str(signal)``."""
         return repr(self)
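
The surrounding :class:`Signal` class follows the Django-style dispatch API;
a minimal sketch (signal and receiver names are illustrative):

    from celery.utils.dispatch import Signal

    task_done = Signal(providing_args=['result'])

    def on_task_done(sender=None, result=None, **kwargs):
        print('done: %r' % (result,))

    task_done.connect(on_task_done)
    task_done.send(sender=None, result=42)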

+ 17 - 10
celery/utils/functional.py

@@ -67,8 +67,7 @@ def noop(*args, **kwargs):
 
 
 def pass1(arg, *args, **kwargs):
-    """Take any number of arguments/keyword arguments and return
-    the first positional argument."""
+    """Return the first positional argument."""
     return arg
 
 
@@ -80,8 +79,7 @@ def evaluate_promises(it):
 
 
 def first(predicate, it):
-    """Return the first element in ``iterable`` that ``predicate`` gives a
-    :const:`True` value for.
+    """Return the first element in ``it`` that ``predicate`` accepts.
 
     If ``predicate`` is None it will return the first item that's not
     :const:`None`.
@@ -94,13 +92,14 @@ def first(predicate, it):
 
 
 def firstmethod(method, on_call=None):
-    """Return a function that with a list of instances,
+    """Multiple dispatch.
+
+    Return a function that with a list of instances,
     finds the first instance that gives a value for the given method.
 
     The list can also contain lazy instances
     (:class:`~kombu.utils.functional.lazy`.)
     """
-
    def _matcher(it, *args, **kwargs):
        for obj in it:
            try:
@@ -150,8 +149,11 @@ def padlist(container, size, default=None):
 
 
 def mattrgetter(*attrs):
-    """Like :func:`operator.itemgetter` but return :const:`None` on missing
-    attributes instead of raising :exc:`AttributeError`."""
+    """Get attributes, ignoring attribute errors.
+
+    Like :func:`operator.attrgetter` but return :const:`None` on missing
+    attributes instead of raising :exc:`AttributeError`.
+    """
     return lambda obj: {attr: getattr(obj, attr, None) for attr in attrs}
 
 
@@ -162,9 +164,12 @@ def uniq(it):
 
 
 def regen(it):
-    """``Regen`` takes any iterable, and if the object is an
+    """Convert iterator to an object that can be consumed multiple times.
+
+    ``Regen`` takes any iterable, and if the object is a
     generator it will cache the evaluated list on first access,
-    so that the generator can be "consumed" multiple times."""
+    so that the generator can be "consumed" multiple times.
+    """
     if isinstance(it, (list, tuple)):
         return it
     return _regen(it)
@@ -228,6 +233,7 @@ def _argsfromspec(spec, replace_defaults=True):
 
 
 def head_from_fun(fun, bound=False, debug=False):
+    """Generate signature function from actual function."""
     # we could use inspect.Signature here, but that implementation
     # is very slow since it implements the argument checking
     # in pure-Python.  Instead we use exec to create a new function
@@ -267,4 +273,5 @@ def fun_takes_argument(name, fun, position=None):
 
 
 def maybe(typ, val):
+    """Call typ on value if val is defined."""
     return typ(val) if val is not None else val
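
Doctest-style sketches of a few of these helpers:

    >>> from celery.utils.functional import first, maybe, regen
    >>> first(lambda x: x > 2, [1, 2, 3, 4])
    3
    >>> it = regen(n for n in (1, 2, 3))
    >>> list(it), list(it)    # the generator can be consumed twice
    ([1, 2, 3], [1, 2, 3])
    >>> maybe(int, '10'), maybe(int, None)
    (10, None)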

+ 11 - 6
celery/utils/graph.py

@@ -13,6 +13,8 @@ __all__ = ['DOT', 'CycleError', 'DependencyGraph', 'GraphFormatter']
 
 
 class DOT:
+    """Constants related to the dot format."""
+
     HEAD = dedent("""
         {IN}{type} {id} {{
         {INp}graph [{attrs}]
@@ -54,8 +56,10 @@ class DependencyGraph(object):
         self.adjacent.setdefault(obj, [])
 
     def add_edge(self, A, B):
-        """Add an edge from object ``A`` to object ``B``
-        (``A`` depends on ``B``)."""
+        """Add an edge from object ``A`` to object ``B``.
+
+        I.e. ``A`` depends on ``B``.
+        """
         self[A].append(B)
 
     def connect(self, graph):
@@ -95,8 +99,7 @@ class DependencyGraph(object):
         return sum(l)
 
     def update(self, it):
-        """Update the graph with data from a list
-        of ``(obj, dependencies)`` tuples."""
+        """Update graph with data from a list of ``(obj, deps)`` tuples."""
         tups = list(it)
         for obj, _ in tups:
             self.add_arc(obj)
@@ -109,7 +112,7 @@ class DependencyGraph(object):
         return (obj for obj, adj in items(self) if adj)
 
     def _khan62(self):
-        """Khans simple topological sort algorithm from '62
+        """Perform Kahn's simple topological sort algorithm from '62.
 
         See https://en.wikipedia.org/wiki/Topological_sorting
         """
@@ -133,7 +136,7 @@ class DependencyGraph(object):
         return result
 
     def _tarjan72(self):
-        """Tarjan's algorithm to find strongly connected components.
+        """Perform Tarjan's algorithm to find strongly connected components.
 
         See Also:
             http://bit.ly/vIMv3h.
@@ -225,6 +228,8 @@ class DependencyGraph(object):
 
 
 class GraphFormatter(object):
+    """Format dependency graphs."""
+
     _attr = DOT.ATTR.strip()
     _node = DOT.NODE.strip()
     _edge = DOT.EDGE.strip()
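
A sketch of building a small graph and sorting it topologically (``topsort``
is the public entry point for the sort methods shown above):

    from celery.utils.graph import DependencyGraph

    g = DependencyGraph()
    g.update([('a', ['b']), ('b', ['c']), ('c', [])])
    order = g.topsort()   # a topological ordering of 'a', 'b' and 'c'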

+ 6 - 3
celery/utils/imports.py

@@ -28,11 +28,12 @@ __all__ = [
 
 
 class NotAPackage(Exception):
-    pass
+    """Raised when importing a package, but it's not a package."""
 
 
 if sys.version_info > (3, 3):  # pragma: no cover
     def qualname(obj):
+        """Return object name."""
         if not hasattr(obj, '__name__') and hasattr(obj, '__class__'):
             obj = obj.__class__
         q = getattr(obj, '__qualname__', None)
@@ -41,6 +42,7 @@ if sys.version_info > (3, 3):  # pragma: no cover
         return q
 else:
     def qualname(obj):  # noqa
+        """Return object name."""
         if not hasattr(obj, '__name__') and hasattr(obj, '__class__'):
             obj = obj.__class__
         return '.'.join((obj.__module__, obj.__name__))
@@ -57,6 +59,7 @@ def instantiate(name, *args, **kwargs):
 
 @contextmanager
 def cwd_in_path():
+    """Context adding the current working directory to sys.path."""
     cwd = os.getcwd()
     if cwd in sys.path:
         yield
@@ -91,8 +94,7 @@ def find_module(module, path=None, imp=None):
 
 
 def import_from_cwd(module, imp=None, package=None):
-    """Import module, but make sure it finds modules
-    located in the current directory.
+    """Import module, temporarily including modules in the current directory.
 
     Modules located in the current directory has
     precedence over modules located in `sys.path`.
@@ -104,6 +106,7 @@ def import_from_cwd(module, imp=None, package=None):
 
 
 def reload_from_cwd(module, reloader=None):
+    """Reload module (ensuring that CWD is in sys.path)."""
     if reloader is None:
         reloader = reload
     with cwd_in_path():
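
A quick doctest-style sketch of ``qualname``:

    >>> from celery.utils.imports import qualname
    >>> qualname(qualname)
    'celery.utils.imports.qualname'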

+ 1 - 2
celery/utils/iso8601.py

@@ -54,8 +54,7 @@ TIMEZONE_REGEX = re.compile(
 
 
 def parse_iso8601(datestring):
-    """Parse and convert ISO-8601 string into a
-    :class:`~datetime.datetime` object"""
+    """Parse and convert ISO-8601 string to datetime."""
     m = ISO8601_REGEX.match(datestring)
     if not m:
         raise ValueError('unable to parse date string %r' % datestring)
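
A usage sketch (the returned value is a timezone-aware
:class:`~datetime.datetime`):

    from celery.utils.iso8601 import parse_iso8601

    dt = parse_iso8601('2016-04-11T15:34:37+00:00')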

+ 20 - 11
celery/utils/log.py

@@ -42,6 +42,7 @@ base_logger = logger = _get_logger('celery')
 
 
 def set_in_sighandler(value):
+    """Set flag signifying that we're inside a signal handler."""
     global _in_sighandler
     _in_sighandler = value
 
@@ -65,6 +66,7 @@ def iter_open_logger_fds():
 
 @contextmanager
 def in_sighandler():
+    """Context that records that we are in a signal handler."""
     set_in_sighandler(True)
     try:
         yield
@@ -92,6 +94,7 @@ def logger_isa(l, p, max=1000):
 
 
 def get_logger(name):
+    """Get logger by name."""
     l = _get_logger(name)
     if logging.root not in (l, l.parent) and l is not base_logger:
         if not logger_isa(l, base_logger):  # pragma: no cover
@@ -102,6 +105,7 @@ worker_logger = get_logger('celery.worker')
 
 
 def get_task_logger(name):
+    """Get logger for task module by name."""
     if name in RESERVED_LOGGER_NAMES:
         raise RuntimeError('Logger name {0!r} is reserved!'.format(name))
     logger = get_logger(name)
@@ -111,12 +115,15 @@ def get_task_logger(name):
 
 
 def mlevel(level):
+    """Convert level name/int to log level."""
     if level and not isinstance(level, numbers.Integral):
         return LOG_LEVELS[level.upper()]
     return level
 
 
 class ColorFormatter(logging.Formatter):
+    """Logging formatter that adds colors based on severity."""
+
     #: Loglevel -> Color mapping.
     COLORS = colored().names
     colors = {
@@ -178,6 +185,7 @@ class LoggingProxy(object):
         logger (~logging.Logger): Logger instance to forward to.
         loglevel (int, str): Log level to use when logging messages.
     """
+
     mode = 'w'
     name = None
     closed = False
@@ -190,9 +198,9 @@ class LoggingProxy(object):
         self._safewrap_handlers()
 
     def _safewrap_handlers(self):
-        """Make the logger handlers dump internal errors to
-        :data:`sys.__stderr__` instead of :data:`sys.stderr` to circumvent
-        infinite loops."""
+        # Make the logger handlers dump internal errors to
+        # :data:`sys.__stderr__` instead of :data:`sys.stderr` to circumvent
+        # infinite loops.
 
         def wrap_handler(handler):                  # pragma: no cover
 
@@ -223,9 +231,8 @@ class LoggingProxy(object):
                 self._thread.recurse_protection = False
 
     def writelines(self, sequence):
-        """`writelines(sequence_of_strings) -> None`.
-
-        Write the strings to the file.
+        # type: (Sequence[str]) -> None
+        """Write list of strings to file.
 
         The sequence can be any iterable object producing strings.
         This is equivalent to calling :meth:`write` for each string.
@@ -234,21 +241,22 @@ class LoggingProxy(object):
             self.write(part)
 
     def flush(self):
-        """This object is not buffered so any :meth:`flush` requests
-        are ignored."""
+        # This object is not buffered so any :meth:`flush`
+        # requests are ignored.
         pass
 
     def close(self):
-        """When the object is closed, no write requests are forwarded to
-        the logging object anymore."""
+        # When the object is closed, no write requests are
+        # forwarded to the logging object anymore.
         self.closed = True
 
     def isatty(self):
-        """Always return :const:`False`.  Just here for file support."""
+        """Here for file support; always returns :const:`False`."""
         return False
 
 
 def get_multiprocessing_logger():
+    """Return the multiprocessing logger."""
     try:
         from billiard import util
     except ImportError:  # pragma: no cover
@@ -258,6 +266,7 @@ def get_multiprocessing_logger():
 
 
 def reset_multiprocessing_logger():
+    """Reset multiprocessing logging setup."""
     try:
         from billiard import util
     except ImportError:  # pragma: no cover
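
Typical task-module usage of the helpers above:

    from celery.utils.log import get_task_logger, mlevel

    logger = get_task_logger(__name__)   # child of the task logger hierarchy
    logger.info('doing work')

    mlevel('DEBUG')   # -> 10; accepts both level names and ints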

+ 8 - 2
celery/utils/nodenames.py

@@ -33,8 +33,7 @@ __all__ = [
 
 
 def worker_direct(hostname):
-    """Return :class:`kombu.Queue` that's a direct route to
-    a worker by hostname.
+    """Return the :class:`kombu.Queue` that's a direct route to a worker.
 
     Arguments:
         hostname (str, ~kombu.Queue): The fully qualified node name of
@@ -57,6 +56,10 @@ def nodename(name, hostname):
 
 
 def anon_nodename(hostname=None, prefix='gen'):
+    """Return the nodename for this process (not a worker).
+
+    This is used for example by the ``origin`` field in task messages.
+    """
     return nodename(''.join([prefix, str(os.getpid())]),
                     hostname or gethostname())
 
@@ -70,11 +73,13 @@ def nodesplit(nodename):
 
 
 def default_nodename(hostname):
+    """Return the default nodename for this process."""
     name, host = nodesplit(hostname or '')
     return nodename(name or NODENAME_DEFAULT, host or gethostname())
 
 
 def node_format(s, nodename, **extra):
+    """Format worker node name (name@host.com)."""
     name, host = nodesplit(nodename)
     return host_format(
         s, host, name or NODENAME_DEFAULT, p=nodename, **extra)
@@ -88,6 +93,7 @@ _fmt_process_index_with_prefix = partial(_fmt_process_index, '-', '')
 
 
 def host_format(s, host=None, name=None, **extra):
+    """Format host %x abbreviations."""
     host = host or gethostname()
     hname, _, domain = host.partition('.')
     name = name or hname
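
A doctest-style sketch of ``nodename``:

    >>> from celery.utils.nodenames import nodename
    >>> nodename('worker1', 'example.com')
    'worker1@example.com'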

+ 3 - 1
celery/utils/objects.py

@@ -43,7 +43,9 @@ def mro_lookup(cls, attr, stop=set(), monkey_patched=[]):
 
 
 class FallbackContext(object):
-    """The built-in ``@contextmanager`` utility does not work well
+    """Context workaround.
+
+    The built-in ``@contextmanager`` utility does not work well
     when wrapping other contexts, as the traceback is wrong when
     the wrapped context raises.
 

+ 14 - 0
celery/utils/pytest.py

@@ -47,12 +47,18 @@ CELERY_TEST_CONFIG = {
 
 
 class Trap(object):
+    """Trap that pretends to be an app but raises an exception instead.
+
+    This is to protect against code that doesn't properly pass app
+    instances, and instead falls back to the current_app.
+    """
 
     def __getattr__(self, name):
         raise RuntimeError('Test depends on current_app')
 
 
 class UnitLogging(symbol_by_name(Celery.log_cls)):
+    """Sets up logging for the test application."""
 
     def __init__(self, *args, **kwargs):
         super(UnitLogging, self).__init__(*args, **kwargs)
@@ -61,6 +67,7 @@ class UnitLogging(symbol_by_name(Celery.log_cls)):
 
 def TestApp(name=None, set_as_current=False, log=UnitLogging,
             broker='memory://', backend='cache+memory://', **kwargs):
+    """App used for testing."""
     app = Celery(name or 'celery.tests',
                  set_as_current=set_as_current,
                  log=log, broker=broker, backend=backend,
@@ -71,6 +78,7 @@ def TestApp(name=None, set_as_current=False, log=UnitLogging,
 
 @pytest.fixture(autouse=True)
 def app(request):
+    """Fixture creating a Celery application instance."""
     from celery import _state
     prev_current_app = current_app()
     prev_default_app = _state.default_app
@@ -106,11 +114,13 @@ def app(request):
 
 @pytest.fixture()
 def depends_on_current_app(app):
+    """Fixture that sets app as current."""
     app.set_current()
 
 
 @pytest.fixture(autouse=True)
 def reset_cache_backend_state(app):
+    """Fixture that resets the internal state of the cache result backend."""
     yield
     backend = app.__dict__.get('backend')
     if backend is not None:
@@ -122,6 +132,7 @@ def reset_cache_backend_state(app):
 
 @decorator
 def assert_signal_called(signal, **expected):
+    """Context that verifies signal is called before exiting."""
     handler = Mock()
     call_handler = partial(handler)
     signal.connect(call_handler)
@@ -134,6 +145,7 @@ def assert_signal_called(signal, **expected):
 
 def TaskMessage(name, id=None, args=(), kwargs={}, callbacks=None,
                 errbacks=None, chain=None, shadow=None, utc=None, **options):
+    """Create task message in protocol 2 format."""
     from celery import uuid
     from kombu.serialization import dumps
     id = id or uuid()
@@ -154,6 +166,7 @@ def TaskMessage(name, id=None, args=(), kwargs={}, callbacks=None,
 
 def TaskMessage1(name, id=None, args=(), kwargs={}, callbacks=None,
                  errbacks=None, chain=None, **options):
+    """Create task message in protocol 1 format."""
     from celery import uuid
     from kombu.serialization import dumps
     id = id or uuid()
@@ -175,6 +188,7 @@ def TaskMessage1(name, id=None, args=(), kwargs={}, callbacks=None,
 
 
 def task_message_from_sig(app, sig, utc=True, TaskMessage=TaskMessage):
+    """Create task message from :class:`celery.Signature`."""
     sig.freeze()
     callbacks = sig.options.pop('link', None)
     errbacks = sig.options.pop('link_error', None)
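
With these fixtures available (e.g. exposed through a pytest plugin), a unit
test can simply accept the ``app`` fixture; a sketch:

    def test_add_task(app):
        @app.task
        def add(x, y):
            return x + y

        assert add.apply(args=(2, 2)).get() == 4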

+ 8 - 1
celery/utils/saferepr.py

@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
 """Streaming, truncating, non-recursive version of :func:`repr`.
 
-
 Differences from regular :func:`repr`:
 
 - Sets are represented the Python 3 way: ``{1, 2}`` vs ``set([1, 2])``.
@@ -61,6 +60,13 @@ LIT_TUPLE_END_SV = _literal(',)', False, -1)
 
 
 def saferepr(o, maxlen=None, maxlevels=3, seen=None):
+    """Safe version of :func:`repr`.
+
+    Warning:
+        Make sure you set the maxlen argument, or it will be very slow
+        for recursive objects.  With the maxlen set, it's often faster
+        than built-in repr.
+    """
     return ''.join(_saferepr(
         o, maxlen=maxlen, maxlevels=maxlevels, seen=seen
     ))
@@ -132,6 +138,7 @@ def _reprseq(val, lit_start, lit_end, builtin_type, chainer):
 
 
 def reprstream(stack, seen=None, maxlevels=3, level=0, isinstance=isinstance):
+    """Streaming repr, yielding tokens."""
     seen = seen or set()
     append = stack.append
     popleft = stack.popleft
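
A usage sketch (``big_object`` is illustrative):

    from celery.utils.saferepr import saferepr

    saferepr({'a': 1})                 # behaves like repr()
    saferepr(big_object, maxlen=256)   # truncates instead of building huge strings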

+ 11 - 6
celery/utils/serialization.py

@@ -41,12 +41,15 @@ except NameError:  # pragma: no cover
 
 
 def subclass_exception(name, parent, module):  # noqa
+    """Create new exception class."""
     return type(bytes_if_py2(name), (parent,), {'__module__': module})
 
 
 def find_pickleable_exception(exc, loads=pickle.loads,
                               dumps=pickle.dumps):
-    """With an exception instance, iterate over its super classes (by MRO)
+    """Find first pickleable exception base class.
+
+    With an exception instance, iterate over its super classes (by MRO)
     and find the first super exception that's pickleable.  It does
     not go below :exc:`Exception` (i.e., it skips :exc:`Exception`,
     :class:`BaseException` and :class:`object`).  If that happens
@@ -157,6 +160,7 @@ def get_pickleable_exception(exc):
 
 
 def get_pickleable_etype(cls, loads=pickle.loads, dumps=pickle.dumps):
+    """Get pickleable exception type."""
     try:
         loads(dumps(cls))
     except:
@@ -166,8 +170,7 @@ def get_pickleable_etype(cls, loads=pickle.loads, dumps=pickle.dumps):
 
 
 def get_pickled_exception(exc):
-    """Get original exception from exception pickled using
-    :meth:`get_pickleable_exception`."""
+    """Reverse of :meth:`get_pickleable_exception`."""
     if isinstance(exc, UnpickleableExceptionWrapper):
         return exc.restore()
     return exc
@@ -184,8 +187,10 @@ def b64decode(s):
 def strtobool(term, table={'false': False, 'no': False, '0': False,
                            'true': True, 'yes': True, '1': True,
                            'on': True, 'off': False}):
-    """Convert common terms for true/false to bool
-    (true/false/yes/no/on/off/1/0)."""
+    """Convert common terms for true/false to bool.
+
+    Examples: true/false/yes/no/on/off/1/0.
+    """
     if isinstance(term, string_t):
         try:
             return table[term.lower()]
@@ -198,7 +203,7 @@ def jsonify(obj,
             builtin_types=(numbers.Real, string_t), key=None,
             keyfilter=None,
             unknown_type_filter=None):
-    """Transforms object making it suitable for json serialization"""
+    """Transform object making it suitable for json serialization."""
     from kombu.abstract import Object as KombuDictType
     _jsonify = partial(jsonify, builtin_types=builtin_types, key=key,
                        keyfilter=keyfilter,
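
A doctest-style sketch of ``strtobool``:

    >>> from celery.utils.serialization import strtobool
    >>> strtobool('yes'), strtobool('off'), strtobool('1')
    (True, False, True)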

+ 5 - 0
celery/utils/static/__init__.py

@@ -1,11 +1,16 @@
+"""Static files."""
 from __future__ import absolute_import, unicode_literals
 
 import os
 
 
 def get_file(*args):
+    # type: (*str) -> str
+    """Get filename for static file."""
     return os.path.join(os.path.abspath(os.path.dirname(__file__)), *args)
 
 
 def logo():
+    # type: () -> str
+    """Return the filename of the Celery logo image."""
     return get_file('celery_128.png')

+ 8 - 2
celery/utils/sysinfo.py

@@ -13,16 +13,22 @@ __all__ = ['load_average', 'df']
 
 if hasattr(os, 'getloadavg'):
 
-    def load_average():
+    def _load_average():
         return tuple(ceil(l * 1e2) / 1e2 for l in os.getloadavg())
 
 else:  # pragma: no cover
     # Windows doesn't have getloadavg
-    def load_average():  # noqa
+    def _load_average():  # noqa
         return (0.0, 0.0, 0.0)
 
 
+def load_average():
+    """Return system load average as a triple."""
+    return _load_average()
+
+
 class df(object):
+    """Disk information."""
 
     def __init__(self, path):
         self.path = path
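
A small sketch of the public wrapper:

    from celery.utils.sysinfo import load_average

    load_average()   # -> three-tuple, e.g. (0.24, 0.32, 0.29); zeros on Windows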

+ 12 - 1
celery/utils/text.py

@@ -29,33 +29,40 @@ RE_FORMAT = re.compile(r'%(\w)')
 
 
 def str_to_list(s):
+    """Convert string to list."""
     if isinstance(s, string_t):
         return s.split(',')
     return s
 
 
 def dedent_initial(s, n=4):
+    """Remove indentation from first line of text."""
     return s[n:] if s[:n] == ' ' * n else s
 
 
 def dedent(s, n=4, sep='\n'):
+    """Remove indentation."""
     return sep.join(dedent_initial(l) for l in s.splitlines())
 
 
 def fill_paragraphs(s, width, sep='\n'):
+    """Fill paragraphs with newlines (or custom separator)."""
     return sep.join(fill(p, width) for p in s.split(sep))
 
 
 def join(l, sep='\n'):
+    """Concatenate list of strings."""
     return sep.join(v for v in l if v)
 
 
 def ensure_sep(sep, s, n=2):
+    """Ensure text ``s`` ends in separator ``sep``."""
     return s + sep * (n - s.count(sep))
 ensure_newlines = partial(ensure_sep, '\n')
 
 
 def abbr(S, max, ellipsis='...'):
+    """Abbreviate word."""
     if S is None:
         return '???'
     if len(S) > max:
@@ -64,6 +71,7 @@ def abbr(S, max, ellipsis='...'):
 
 
 def abbrtask(S, max):
+    """Abbreviate task name."""
     if S is None:
         return '???'
     if len(S) > max:
@@ -79,7 +87,7 @@ def indent(t, indent=0, sep='\n'):
 
 
 def truncate(s, maxlen=128, suffix='...'):
-    """Truncates text to a maximum number of characters."""
+    """Truncate text to a maximum number of characters."""
     if maxlen and len(s) >= maxlen:
         return s[:maxlen].rsplit(' ', 1)[0] + suffix
     return s
@@ -92,12 +100,14 @@ def truncate_bytes(s, maxlen=128, suffix=b'...'):
 
 
 def pluralize(n, text, suffix='s'):
+    """Pluralize term when n is greater than one."""
     if n != 1:
         return text + suffix
     return text
 
 
 def pretty(value, width=80, nl_width=80, sep='\n', **kw):
+    """Format value for printing to console."""
     if isinstance(value, dict):
         return '{{{0} {1}'.format(sep, pformat(value, 4, nl_width)[1:])
     elif isinstance(value, tuple):
@@ -113,6 +123,7 @@ def match_case(s, other):
 
 
 def simple_format(s, keys, pattern=RE_FORMAT, expand=r'\1'):
+    """Format string, expanding abbreviations in ``keys``."""
     if s:
         keys.setdefault('%', '%')
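
Doctest-style sketches of a few of these helpers:

    >>> from celery.utils.text import abbr, pluralize, truncate
    >>> abbr('supercalifragilistic', 10)
    'superca...'
    >>> truncate('The quick brown fox jumps', maxlen=10)
    'The quick...'
    >>> pluralize(3, 'worker')
    'workers'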
 
 

Some files were not shown because too many files changed in this diff